1
2
3
4 package org.apache.hadoop.hbase.protobuf.generated;
5
6 public final class WALProtos {
7 private WALProtos() {}
/**
 * Registers all protobuf extensions declared in this file with the given
 * registry. This generated file declares no extensions, so the method body
 * is intentionally empty.
 */
public static void registerAllExtensions(
    com.google.protobuf.ExtensionRegistry registry) {
}
11
12
13
/**
 * Protobuf enum {@code ScopeType}: the replication scope of a WAL edit.
 *
 * Generated by the protocol buffer compiler (protobuf 2.x style);
 * do not edit by hand — change the .proto definition and regenerate.
 */
public enum ScopeType
    implements com.google.protobuf.ProtocolMessageEnum {
  /**
   * <code>REPLICATION_SCOPE_LOCAL = 0;</code>
   */
  REPLICATION_SCOPE_LOCAL(0, 0),
  /**
   * <code>REPLICATION_SCOPE_GLOBAL = 1;</code>
   */
  REPLICATION_SCOPE_GLOBAL(1, 1),
  ;

  /**
   * Wire value of <code>REPLICATION_SCOPE_LOCAL</code>.
   */
  public static final int REPLICATION_SCOPE_LOCAL_VALUE = 0;
  /**
   * Wire value of <code>REPLICATION_SCOPE_GLOBAL</code>.
   */
  public static final int REPLICATION_SCOPE_GLOBAL_VALUE = 1;

  /** Returns the wire value of this enum constant. */
  public final int getNumber() { return value; }

  /**
   * Maps a wire value back to an enum constant.
   * Returns {@code null} for unrecognized values (protobuf-2.x convention).
   */
  public static ScopeType valueOf(int value) {
    switch (value) {
      case 0: return REPLICATION_SCOPE_LOCAL;
      case 1: return REPLICATION_SCOPE_GLOBAL;
      default: return null;
    }
  }

  public static com.google.protobuf.Internal.EnumLiteMap<ScopeType>
      internalGetValueMap() {
    return internalValueMap;
  }
  // Adapter used by the protobuf runtime to resolve wire values lazily.
  private static com.google.protobuf.Internal.EnumLiteMap<ScopeType>
      internalValueMap =
        new com.google.protobuf.Internal.EnumLiteMap<ScopeType>() {
          public ScopeType findValueByNumber(int number) {
            return ScopeType.valueOf(number);
          }
        };

  public final com.google.protobuf.Descriptors.EnumValueDescriptor
      getValueDescriptor() {
    // 'index' is the position within the descriptor's value list, which the
    // generator keeps in declaration order — do not reorder constants.
    return getDescriptor().getValues().get(index);
  }
  public final com.google.protobuf.Descriptors.EnumDescriptor
      getDescriptorForType() {
    return getDescriptor();
  }
  public static final com.google.protobuf.Descriptors.EnumDescriptor
      getDescriptor() {
    // This enum is the first (index 0) enum type declared in the file.
    return org.apache.hadoop.hbase.protobuf.generated.WALProtos.getDescriptor().getEnumTypes().get(0);
  }

  private static final ScopeType[] VALUES = values();

  /**
   * Resolves an {@link com.google.protobuf.Descriptors.EnumValueDescriptor}
   * belonging to this enum's descriptor back to the Java constant.
   *
   * @throws java.lang.IllegalArgumentException if the descriptor belongs to
   *         a different enum type.
   */
  public static ScopeType valueOf(
      com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
    if (desc.getType() != getDescriptor()) {
      throw new java.lang.IllegalArgumentException(
        "EnumValueDescriptor is not for this type.");
    }
    return VALUES[desc.getIndex()];
  }

  // Position in the descriptor's value list.
  private final int index;
  // Wire value serialized on the wire.
  private final int value;

  private ScopeType(int index, int value) {
    this.index = index;
    this.value = value;
  }

  // @@protoc_insertion_point(enum_scope:ScopeType)
}
92
/**
 * Read-only accessor interface shared by {@code WALHeader} and its Builder.
 *
 * Generated by the protocol buffer compiler; do not edit by hand.
 * Field numbers below are taken from the message's serialization code.
 */
public interface WALHeaderOrBuilder
    extends com.google.protobuf.MessageOrBuilder {

  /**
   * <code>optional bool has_compression = 1;</code>
   *
   * Returns true if the field was explicitly set.
   */
  boolean hasHasCompression();
  /**
   * <code>optional bool has_compression = 1;</code>
   */
  boolean getHasCompression();

  /**
   * <code>optional bytes encryption_key = 2;</code>
   *
   * Returns true if the field was explicitly set.
   */
  boolean hasEncryptionKey();
  /**
   * <code>optional bytes encryption_key = 2;</code>
   */
  com.google.protobuf.ByteString getEncryptionKey();

  /**
   * <code>optional bool has_tag_compression = 3;</code>
   *
   * Returns true if the field was explicitly set.
   */
  boolean hasHasTagCompression();
  /**
   * <code>optional bool has_tag_compression = 3;</code>
   */
  boolean getHasTagCompression();

  /**
   * <code>optional string writer_cls_name = 4;</code>
   *
   * Returns true if the field was explicitly set.
   */
  boolean hasWriterClsName();
  /**
   * <code>optional string writer_cls_name = 4;</code>
   */
  java.lang.String getWriterClsName();
  /**
   * <code>optional string writer_cls_name = 4;</code> as raw UTF-8 bytes.
   */
  com.google.protobuf.ByteString
      getWriterClsNameBytes();

  /**
   * <code>optional string cell_codec_cls_name = 5;</code>
   *
   * Returns true if the field was explicitly set.
   */
  boolean hasCellCodecClsName();
  /**
   * <code>optional string cell_codec_cls_name = 5;</code>
   */
  java.lang.String getCellCodecClsName();
  /**
   * <code>optional string cell_codec_cls_name = 5;</code> as raw UTF-8 bytes.
   */
  com.google.protobuf.ByteString
      getCellCodecClsNameBytes();
}
156
157
158
/**
 * Protobuf type {@code WALHeader}: the header record written at the start of
 * an HBase write-ahead-log file (compression flags, optional encryption key,
 * and the writer / cell-codec class names used to produce the file).
 *
 * Generated by the protocol buffer compiler (protobuf 2.x style);
 * do not edit by hand — change the .proto definition and regenerate.
 */
public static final class WALHeader extends
    com.google.protobuf.GeneratedMessage
    implements WALHeaderOrBuilder {
  // Use WALHeader.newBuilder() to construct.
  private WALHeader(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
    super(builder);
    this.unknownFields = builder.getUnknownFields();
  }
  // Used only to construct the singleton default instance (static block below).
  private WALHeader(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

  private static final WALHeader defaultInstance;
  public static WALHeader getDefaultInstance() {
    return defaultInstance;
  }

  public WALHeader getDefaultInstanceForType() {
    return defaultInstance;
  }

  // Fields read from the wire whose tags this class does not recognize.
  private final com.google.protobuf.UnknownFieldSet unknownFields;
  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet
      getUnknownFields() {
    return this.unknownFields;
  }
  /**
   * Wire-format parsing constructor: reads tag/value pairs until end of
   * stream (tag 0). Unrecognized tags are preserved in unknownFields;
   * recognized ones set the corresponding field and presence bit.
   */
  private WALHeader(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    initFields();
    int mutable_bitField0_ = 0;
    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          default: {
            if (!parseUnknownField(input, unknownFields,
                                   extensionRegistry, tag)) {
              done = true;
            }
            break;
          }
          case 8: {
            // has_compression = 1 (bool)
            bitField0_ |= 0x00000001;
            hasCompression_ = input.readBool();
            break;
          }
          case 18: {
            // encryption_key = 2 (bytes)
            bitField0_ |= 0x00000002;
            encryptionKey_ = input.readBytes();
            break;
          }
          case 24: {
            // has_tag_compression = 3 (bool)
            bitField0_ |= 0x00000004;
            hasTagCompression_ = input.readBool();
            break;
          }
          case 34: {
            // writer_cls_name = 4 (string, kept as ByteString until first use)
            bitField0_ |= 0x00000008;
            writerClsName_ = input.readBytes();
            break;
          }
          case 42: {
            // cell_codec_cls_name = 5 (string, kept as ByteString until first use)
            bitField0_ |= 0x00000010;
            cellCodecClsName_ = input.readBytes();
            break;
          }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(
          e.getMessage()).setUnfinishedMessage(this);
    } finally {
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }
  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_WALHeader_descriptor;
  }

  protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_WALHeader_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader.class, org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader.Builder.class);
  }

  // Stateless parser delegating to the wire-format constructor above.
  public static com.google.protobuf.Parser<WALHeader> PARSER =
      new com.google.protobuf.AbstractParser<WALHeader>() {
    public WALHeader parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return new WALHeader(input, extensionRegistry);
    }
  };

  @java.lang.Override
  public com.google.protobuf.Parser<WALHeader> getParserForType() {
    return PARSER;
  }

  // Presence bits: one bit per optional field, in field-number order.
  private int bitField0_;

  // optional bool has_compression = 1;
  public static final int HAS_COMPRESSION_FIELD_NUMBER = 1;
  private boolean hasCompression_;
  /**
   * <code>optional bool has_compression = 1;</code>
   */
  public boolean hasHasCompression() {
    return ((bitField0_ & 0x00000001) == 0x00000001);
  }
  /**
   * <code>optional bool has_compression = 1;</code>
   */
  public boolean getHasCompression() {
    return hasCompression_;
  }

  // optional bytes encryption_key = 2;
  public static final int ENCRYPTION_KEY_FIELD_NUMBER = 2;
  private com.google.protobuf.ByteString encryptionKey_;
  /**
   * <code>optional bytes encryption_key = 2;</code>
   */
  public boolean hasEncryptionKey() {
    return ((bitField0_ & 0x00000002) == 0x00000002);
  }
  /**
   * <code>optional bytes encryption_key = 2;</code>
   */
  public com.google.protobuf.ByteString getEncryptionKey() {
    return encryptionKey_;
  }

  // optional bool has_tag_compression = 3;
  public static final int HAS_TAG_COMPRESSION_FIELD_NUMBER = 3;
  private boolean hasTagCompression_;
  /**
   * <code>optional bool has_tag_compression = 3;</code>
   */
  public boolean hasHasTagCompression() {
    return ((bitField0_ & 0x00000004) == 0x00000004);
  }
  /**
   * <code>optional bool has_tag_compression = 3;</code>
   */
  public boolean getHasTagCompression() {
    return hasTagCompression_;
  }

  // optional string writer_cls_name = 4;
  // Holds either a String or a ByteString; decoded lazily on first access.
  public static final int WRITER_CLS_NAME_FIELD_NUMBER = 4;
  private java.lang.Object writerClsName_;
  /**
   * <code>optional string writer_cls_name = 4;</code>
   */
  public boolean hasWriterClsName() {
    return ((bitField0_ & 0x00000008) == 0x00000008);
  }
  /**
   * <code>optional string writer_cls_name = 4;</code>
   *
   * Decodes the stored ByteString on first call; the decoded String is
   * cached back into the field only when it is valid UTF-8.
   */
  public java.lang.String getWriterClsName() {
    java.lang.Object ref = writerClsName_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs =
          (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      if (bs.isValidUtf8()) {
        writerClsName_ = s;
      }
      return s;
    }
  }
  /**
   * <code>optional string writer_cls_name = 4;</code> as raw UTF-8 bytes;
   * encodes and caches the ByteString form when a String is stored.
   */
  public com.google.protobuf.ByteString
      getWriterClsNameBytes() {
    java.lang.Object ref = writerClsName_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8(
              (java.lang.String) ref);
      writerClsName_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  // optional string cell_codec_cls_name = 5;
  // Same lazy String/ByteString caching scheme as writer_cls_name above.
  public static final int CELL_CODEC_CLS_NAME_FIELD_NUMBER = 5;
  private java.lang.Object cellCodecClsName_;
  /**
   * <code>optional string cell_codec_cls_name = 5;</code>
   */
  public boolean hasCellCodecClsName() {
    return ((bitField0_ & 0x00000010) == 0x00000010);
  }
  /**
   * <code>optional string cell_codec_cls_name = 5;</code>
   */
  public java.lang.String getCellCodecClsName() {
    java.lang.Object ref = cellCodecClsName_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs =
          (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      if (bs.isValidUtf8()) {
        cellCodecClsName_ = s;
      }
      return s;
    }
  }
  /**
   * <code>optional string cell_codec_cls_name = 5;</code> as raw UTF-8 bytes.
   */
  public com.google.protobuf.ByteString
      getCellCodecClsNameBytes() {
    java.lang.Object ref = cellCodecClsName_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8(
              (java.lang.String) ref);
      cellCodecClsName_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  // Resets every field to its proto default value.
  private void initFields() {
    hasCompression_ = false;
    encryptionKey_ = com.google.protobuf.ByteString.EMPTY;
    hasTagCompression_ = false;
    writerClsName_ = "";
    cellCodecClsName_ = "";
  }
  // -1 = not computed yet, 0 = not initialized, 1 = initialized.
  private byte memoizedIsInitialized = -1;
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized != -1) return isInitialized == 1;

    // All fields are optional, so the message is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }

  /**
   * Serializes the set fields in field-number order, then any unknown fields.
   */
  public void writeTo(com.google.protobuf.CodedOutputStream output)
                      throws java.io.IOException {
    getSerializedSize();
    if (((bitField0_ & 0x00000001) == 0x00000001)) {
      output.writeBool(1, hasCompression_);
    }
    if (((bitField0_ & 0x00000002) == 0x00000002)) {
      output.writeBytes(2, encryptionKey_);
    }
    if (((bitField0_ & 0x00000004) == 0x00000004)) {
      output.writeBool(3, hasTagCompression_);
    }
    if (((bitField0_ & 0x00000008) == 0x00000008)) {
      output.writeBytes(4, getWriterClsNameBytes());
    }
    if (((bitField0_ & 0x00000010) == 0x00000010)) {
      output.writeBytes(5, getCellCodecClsNameBytes());
    }
    getUnknownFields().writeTo(output);
  }

  // Memoized wire size; -1 until first computed.
  private int memoizedSerializedSize = -1;
  public int getSerializedSize() {
    int size = memoizedSerializedSize;
    if (size != -1) return size;

    size = 0;
    if (((bitField0_ & 0x00000001) == 0x00000001)) {
      size += com.google.protobuf.CodedOutputStream
        .computeBoolSize(1, hasCompression_);
    }
    if (((bitField0_ & 0x00000002) == 0x00000002)) {
      size += com.google.protobuf.CodedOutputStream
        .computeBytesSize(2, encryptionKey_);
    }
    if (((bitField0_ & 0x00000004) == 0x00000004)) {
      size += com.google.protobuf.CodedOutputStream
        .computeBoolSize(3, hasTagCompression_);
    }
    if (((bitField0_ & 0x00000008) == 0x00000008)) {
      size += com.google.protobuf.CodedOutputStream
        .computeBytesSize(4, getWriterClsNameBytes());
    }
    if (((bitField0_ & 0x00000010) == 0x00000010)) {
      size += com.google.protobuf.CodedOutputStream
        .computeBytesSize(5, getCellCodecClsNameBytes());
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSerializedSize = size;
    return size;
  }

  private static final long serialVersionUID = 0L;
  @java.lang.Override
  protected java.lang.Object writeReplace()
      throws java.io.ObjectStreamException {
    return super.writeReplace();
  }

  /**
   * Field-by-field equality: presence bits must match, and each present
   * field's value must match; unknown fields are compared too.
   */
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
     return true;
    }
    if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader)) {
      return super.equals(obj);
    }
    org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader other = (org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader) obj;

    boolean result = true;
    result = result && (hasHasCompression() == other.hasHasCompression());
    if (hasHasCompression()) {
      result = result && (getHasCompression()
          == other.getHasCompression());
    }
    result = result && (hasEncryptionKey() == other.hasEncryptionKey());
    if (hasEncryptionKey()) {
      result = result && getEncryptionKey()
          .equals(other.getEncryptionKey());
    }
    result = result && (hasHasTagCompression() == other.hasHasTagCompression());
    if (hasHasTagCompression()) {
      result = result && (getHasTagCompression()
          == other.getHasTagCompression());
    }
    result = result && (hasWriterClsName() == other.hasWriterClsName());
    if (hasWriterClsName()) {
      result = result && getWriterClsName()
          .equals(other.getWriterClsName());
    }
    result = result && (hasCellCodecClsName() == other.hasCellCodecClsName());
    if (hasCellCodecClsName()) {
      result = result && getCellCodecClsName()
          .equals(other.getCellCodecClsName());
    }
    result = result &&
        getUnknownFields().equals(other.getUnknownFields());
    return result;
  }

  // Memoized hash; 0 means not yet computed.
  private int memoizedHashCode = 0;
  /**
   * Hash consistent with equals(): mixes each present field keyed by its
   * field number, plus the descriptor and unknown fields.
   */
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptorForType().hashCode();
    if (hasHasCompression()) {
      hash = (37 * hash) + HAS_COMPRESSION_FIELD_NUMBER;
      hash = (53 * hash) + hashBoolean(getHasCompression());
    }
    if (hasEncryptionKey()) {
      hash = (37 * hash) + ENCRYPTION_KEY_FIELD_NUMBER;
      hash = (53 * hash) + getEncryptionKey().hashCode();
    }
    if (hasHasTagCompression()) {
      hash = (37 * hash) + HAS_TAG_COMPRESSION_FIELD_NUMBER;
      hash = (53 * hash) + hashBoolean(getHasTagCompression());
    }
    if (hasWriterClsName()) {
      hash = (37 * hash) + WRITER_CLS_NAME_FIELD_NUMBER;
      hash = (53 * hash) + getWriterClsName().hashCode();
    }
    if (hasCellCodecClsName()) {
      hash = (37 * hash) + CELL_CODEC_CLS_NAME_FIELD_NUMBER;
      hash = (53 * hash) + getCellCodecClsName().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  // Standard generated parseFrom overloads, all delegating to PARSER.
  public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader parseFrom(
      byte[] data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return PARSER.parseFrom(input);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader parseFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return PARSER.parseFrom(input, extensionRegistry);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return PARSER.parseDelimitedFrom(input);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader parseDelimitedFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return PARSER.parseDelimitedFrom(input, extensionRegistry);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader parseFrom(
      com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return PARSER.parseFrom(input);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return PARSER.parseFrom(input, extensionRegistry);
  }

  public static Builder newBuilder() { return Builder.create(); }
  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader prototype) {
    return newBuilder().mergeFrom(prototype);
  }
  public Builder toBuilder() { return newBuilder(this); }

  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessage.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }

  /**
   * Builder for {@code WALHeader}: mutable companion that tracks field
   * presence in its own bitField0_ and produces immutable messages via
   * {@link #build()} / {@link #buildPartial()}.
   */
  public static final class Builder extends
      com.google.protobuf.GeneratedMessage.Builder<Builder>
     implements org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeaderOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_WALHeader_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_WALHeader_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader.class, org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader.Builder.class);
    }

    // Construct using WALProtos.WALHeader.newBuilder().
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        // No message-typed fields, so nothing to pre-initialize.
      }
    }
    private static Builder create() {
      return new Builder();
    }

    /** Resets every field to its default and clears all presence bits. */
    public Builder clear() {
      super.clear();
      hasCompression_ = false;
      bitField0_ = (bitField0_ & ~0x00000001);
      encryptionKey_ = com.google.protobuf.ByteString.EMPTY;
      bitField0_ = (bitField0_ & ~0x00000002);
      hasTagCompression_ = false;
      bitField0_ = (bitField0_ & ~0x00000004);
      writerClsName_ = "";
      bitField0_ = (bitField0_ & ~0x00000008);
      cellCodecClsName_ = "";
      bitField0_ = (bitField0_ & ~0x00000010);
      return this;
    }

    public Builder clone() {
      return create().mergeFrom(buildPartial());
    }

    public com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_WALHeader_descriptor;
    }

    public org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader getDefaultInstanceForType() {
      return org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader.getDefaultInstance();
    }

    /**
     * Builds the message, throwing if required fields are missing
     * (none exist here, so this cannot throw in practice).
     */
    public org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader build() {
      org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    /**
     * Copies the builder's fields and presence bits into a new immutable
     * message without checking initialization.
     */
    public org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader buildPartial() {
      org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader result = new org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader(this);
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
        to_bitField0_ |= 0x00000001;
      }
      result.hasCompression_ = hasCompression_;
      if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
        to_bitField0_ |= 0x00000002;
      }
      result.encryptionKey_ = encryptionKey_;
      if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
        to_bitField0_ |= 0x00000004;
      }
      result.hasTagCompression_ = hasTagCompression_;
      if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
        to_bitField0_ |= 0x00000008;
      }
      result.writerClsName_ = writerClsName_;
      if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
        to_bitField0_ |= 0x00000010;
      }
      result.cellCodecClsName_ = cellCodecClsName_;
      result.bitField0_ = to_bitField0_;
      onBuilt();
      return result;
    }

    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader) {
        return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    /** Copies each field that is set in {@code other} over this builder. */
    public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader other) {
      if (other == org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader.getDefaultInstance()) return this;
      if (other.hasHasCompression()) {
        setHasCompression(other.getHasCompression());
      }
      if (other.hasEncryptionKey()) {
        setEncryptionKey(other.getEncryptionKey());
      }
      if (other.hasHasTagCompression()) {
        setHasTagCompression(other.getHasTagCompression());
      }
      if (other.hasWriterClsName()) {
        // Copy the raw Object (String or ByteString) directly to avoid
        // forcing UTF-8 decode/encode during merge.
        bitField0_ |= 0x00000008;
        writerClsName_ = other.writerClsName_;
        onChanged();
      }
      if (other.hasCellCodecClsName()) {
        bitField0_ |= 0x00000010;
        cellCodecClsName_ = other.cellCodecClsName_;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      return this;
    }

    public final boolean isInitialized() {
      // All fields are optional.
      return true;
    }

    /**
     * Parses a message from {@code input} and merges it into this builder.
     * On parse failure, any partially-parsed data is still merged (finally
     * block) before the exception propagates.
     */
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader) e.getUnfinishedMessage();
        throw e;
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }
    private int bitField0_;

    // optional bool has_compression = 1;
    private boolean hasCompression_ ;
    /**
     * <code>optional bool has_compression = 1;</code>
     */
    public boolean hasHasCompression() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>optional bool has_compression = 1;</code>
     */
    public boolean getHasCompression() {
      return hasCompression_;
    }
    /**
     * <code>optional bool has_compression = 1;</code>
     */
    public Builder setHasCompression(boolean value) {
      bitField0_ |= 0x00000001;
      hasCompression_ = value;
      onChanged();
      return this;
    }
    /**
     * <code>optional bool has_compression = 1;</code>
     */
    public Builder clearHasCompression() {
      bitField0_ = (bitField0_ & ~0x00000001);
      hasCompression_ = false;
      onChanged();
      return this;
    }

    // optional bytes encryption_key = 2;
    private com.google.protobuf.ByteString encryptionKey_ = com.google.protobuf.ByteString.EMPTY;
    /**
     * <code>optional bytes encryption_key = 2;</code>
     */
    public boolean hasEncryptionKey() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>optional bytes encryption_key = 2;</code>
     */
    public com.google.protobuf.ByteString getEncryptionKey() {
      return encryptionKey_;
    }
    /**
     * <code>optional bytes encryption_key = 2;</code>
     *
     * @throws NullPointerException if {@code value} is null
     */
    public Builder setEncryptionKey(com.google.protobuf.ByteString value) {
      if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000002;
      encryptionKey_ = value;
      onChanged();
      return this;
    }
    /**
     * <code>optional bytes encryption_key = 2;</code>
     */
    public Builder clearEncryptionKey() {
      bitField0_ = (bitField0_ & ~0x00000002);
      encryptionKey_ = getDefaultInstance().getEncryptionKey();
      onChanged();
      return this;
    }

    // optional bool has_tag_compression = 3;
    private boolean hasTagCompression_ ;
    /**
     * <code>optional bool has_tag_compression = 3;</code>
     */
    public boolean hasHasTagCompression() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    /**
     * <code>optional bool has_tag_compression = 3;</code>
     */
    public boolean getHasTagCompression() {
      return hasTagCompression_;
    }
    /**
     * <code>optional bool has_tag_compression = 3;</code>
     */
    public Builder setHasTagCompression(boolean value) {
      bitField0_ |= 0x00000004;
      hasTagCompression_ = value;
      onChanged();
      return this;
    }
    /**
     * <code>optional bool has_tag_compression = 3;</code>
     */
    public Builder clearHasTagCompression() {
      bitField0_ = (bitField0_ & ~0x00000004);
      hasTagCompression_ = false;
      onChanged();
      return this;
    }

    // optional string writer_cls_name = 4;
    // Holds either a String or a ByteString, converted lazily.
    private java.lang.Object writerClsName_ = "";
    /**
     * <code>optional string writer_cls_name = 4;</code>
     */
    public boolean hasWriterClsName() {
      return ((bitField0_ & 0x00000008) == 0x00000008);
    }
    /**
     * <code>optional string writer_cls_name = 4;</code>
     *
     * Unlike the message accessor, the builder always caches the decoded
     * String back into the field.
     */
    public java.lang.String getWriterClsName() {
      java.lang.Object ref = writerClsName_;
      if (!(ref instanceof java.lang.String)) {
        java.lang.String s = ((com.google.protobuf.ByteString) ref)
            .toStringUtf8();
        writerClsName_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     * <code>optional string writer_cls_name = 4;</code> as raw UTF-8 bytes.
     */
    public com.google.protobuf.ByteString
        getWriterClsNameBytes() {
      java.lang.Object ref = writerClsName_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        writerClsName_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     * <code>optional string writer_cls_name = 4;</code>
     *
     * @throws NullPointerException if {@code value} is null
     */
    public Builder setWriterClsName(
        java.lang.String value) {
      if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000008;
      writerClsName_ = value;
      onChanged();
      return this;
    }
    /**
     * <code>optional string writer_cls_name = 4;</code>
     */
    public Builder clearWriterClsName() {
      bitField0_ = (bitField0_ & ~0x00000008);
      writerClsName_ = getDefaultInstance().getWriterClsName();
      onChanged();
      return this;
    }
    /**
     * <code>optional string writer_cls_name = 4;</code> set from raw bytes
     * without UTF-8 validation.
     *
     * @throws NullPointerException if {@code value} is null
     */
    public Builder setWriterClsNameBytes(
        com.google.protobuf.ByteString value) {
      if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000008;
      writerClsName_ = value;
      onChanged();
      return this;
    }

    // optional string cell_codec_cls_name = 5;
    // Same lazy String/ByteString handling as writer_cls_name.
    private java.lang.Object cellCodecClsName_ = "";
    /**
     * <code>optional string cell_codec_cls_name = 5;</code>
     */
    public boolean hasCellCodecClsName() {
      return ((bitField0_ & 0x00000010) == 0x00000010);
    }
    /**
     * <code>optional string cell_codec_cls_name = 5;</code>
     */
    public java.lang.String getCellCodecClsName() {
      java.lang.Object ref = cellCodecClsName_;
      if (!(ref instanceof java.lang.String)) {
        java.lang.String s = ((com.google.protobuf.ByteString) ref)
            .toStringUtf8();
        cellCodecClsName_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     * <code>optional string cell_codec_cls_name = 5;</code> as raw UTF-8 bytes.
     */
    public com.google.protobuf.ByteString
        getCellCodecClsNameBytes() {
      java.lang.Object ref = cellCodecClsName_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        cellCodecClsName_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     * <code>optional string cell_codec_cls_name = 5;</code>
     *
     * @throws NullPointerException if {@code value} is null
     */
    public Builder setCellCodecClsName(
        java.lang.String value) {
      if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000010;
      cellCodecClsName_ = value;
      onChanged();
      return this;
    }
    /**
     * <code>optional string cell_codec_cls_name = 5;</code>
     */
    public Builder clearCellCodecClsName() {
      bitField0_ = (bitField0_ & ~0x00000010);
      cellCodecClsName_ = getDefaultInstance().getCellCodecClsName();
      onChanged();
      return this;
    }
    /**
     * <code>optional string cell_codec_cls_name = 5;</code> set from raw
     * bytes without UTF-8 validation.
     *
     * @throws NullPointerException if {@code value} is null
     */
    public Builder setCellCodecClsNameBytes(
        com.google.protobuf.ByteString value) {
      if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000010;
      cellCodecClsName_ = value;
      onChanged();
      return this;
    }

    // @@protoc_insertion_point(builder_scope:WALHeader)
  }

  static {
    defaultInstance = new WALHeader(true);
    defaultInstance.initFields();
  }

  // @@protoc_insertion_point(class_scope:WALHeader)
}
1039
/**
 * Read-only accessor interface shared by {@code WALKey} and its Builder.
 *
 * Generated by the protocol buffer compiler; do not edit by hand.
 * Field numbers 1-5 below are taken from WALKey's serialization code;
 * numbers for the remaining fields are not visible in this chunk and are
 * therefore not stated here.
 */
public interface WALKeyOrBuilder
    extends com.google.protobuf.MessageOrBuilder {

  /**
   * <code>bytes encoded_region_name = 1;</code>
   *
   * Returns true if the field was explicitly set.
   */
  boolean hasEncodedRegionName();
  /**
   * <code>bytes encoded_region_name = 1;</code>
   */
  com.google.protobuf.ByteString getEncodedRegionName();

  /**
   * <code>bytes table_name = 2;</code>
   *
   * Returns true if the field was explicitly set.
   */
  boolean hasTableName();
  /**
   * <code>bytes table_name = 2;</code>
   */
  com.google.protobuf.ByteString getTableName();

  /**
   * <code>uint64 log_sequence_number = 3;</code>
   *
   * Returns true if the field was explicitly set.
   */
  boolean hasLogSequenceNumber();
  /**
   * <code>uint64 log_sequence_number = 3;</code>
   */
  long getLogSequenceNumber();

  /**
   * <code>uint64 write_time = 4;</code>
   *
   * Returns true if the field was explicitly set.
   */
  boolean hasWriteTime();
  /**
   * <code>uint64 write_time = 4;</code>
   */
  long getWriteTime();

  /**
   * Singular cluster id (message field 5). Deprecated in the .proto —
   * presumably superseded by the repeated {@code clusterIds} accessors
   * below; confirm against the .proto definition.
   */
  @java.lang.Deprecated boolean hasClusterId();
  /**
   * Singular cluster id (message field 5). Deprecated — see above.
   */
  @java.lang.Deprecated org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID getClusterId();
  /**
   * Singular cluster id (message field 5). Deprecated — see above.
   */
  @java.lang.Deprecated org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUIDOrBuilder getClusterIdOrBuilder();

  /**
   * Repeated per-family replication scopes.
   */
  java.util.List<org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope>
      getScopesList();
  /**
   * Repeated per-family replication scopes.
   */
  org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope getScopes(int index);
  /**
   * Number of scope entries.
   */
  int getScopesCount();
  /**
   * Repeated per-family replication scopes (builder view).
   */
  java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScopeOrBuilder>
      getScopesOrBuilderList();
  /**
   * Repeated per-family replication scopes (builder view).
   */
  org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScopeOrBuilder getScopesOrBuilder(
      int index);

  /**
   * <code>optional uint32 following_kv_count;</code>
   *
   * Returns true if the field was explicitly set.
   */
  boolean hasFollowingKvCount();
  /**
   * <code>optional uint32 following_kv_count;</code>
   */
  int getFollowingKvCount();

  /**
   * Repeated cluster UUIDs. NOTE(review): the deprecation of the singular
   * {@code clusterId} field suggests these are the clusters that have
   * consumed the change — confirm against the .proto definition.
   */
  java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID>
      getClusterIdsList();
  /**
   * Repeated cluster UUIDs — see note on {@code getClusterIdsList()}.
   */
  org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID getClusterIds(int index);
  /**
   * Number of cluster UUID entries.
   */
  int getClusterIdsCount();
  /**
   * Repeated cluster UUIDs (builder view).
   */
  java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUIDOrBuilder>
      getClusterIdsOrBuilderList();
  /**
   * Repeated cluster UUIDs (builder view).
   */
  org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUIDOrBuilder getClusterIdsOrBuilder(
      int index);

  /**
   * <code>optional uint64 nonceGroup;</code>
   *
   * Returns true if the field was explicitly set.
   */
  boolean hasNonceGroup();
  /**
   * <code>optional uint64 nonceGroup;</code>
   */
  long getNonceGroup();

  /**
   * <code>optional uint64 nonce;</code>
   *
   * Returns true if the field was explicitly set.
   */
  boolean hasNonce();
  /**
   * <code>optional uint64 nonce;</code>
   */
  long getNonce();

  /**
   * <code>optional uint64 orig_sequence_number;</code>
   *
   * Returns true if the field was explicitly set.
   */
  boolean hasOrigSequenceNumber();
  /**
   * <code>optional uint64 orig_sequence_number;</code>
   */
  long getOrigSequenceNumber();
}
1241
1242
1243
1244
1245
1246
1247
1248
1249
1250 public static final class WALKey extends
1251 com.google.protobuf.GeneratedMessage
1252 implements WALKeyOrBuilder {
1253
1254 private WALKey(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
1255 super(builder);
1256 this.unknownFields = builder.getUnknownFields();
1257 }
1258 private WALKey(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
1259
    // Shared immutable instance with every field at its default value
    // (assigned in the class static initializer, outside this view).
    private static final WALKey defaultInstance;
    /** Returns the shared all-defaults WALKey instance. */
    public static WALKey getDefaultInstance() {
      return defaultInstance;
    }

    /** Same as {@link #getDefaultInstance()}; required by the Message contract. */
    public WALKey getDefaultInstanceForType() {
      return defaultInstance;
    }
1268
    // Fields read off the wire whose tags this (older) generated class does not
    // recognize; preserved so re-serialization round-trips them.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    /**
     * Parses a WALKey straight off a CodedInputStream (used by {@link #PARSER}).
     * Wire layout handled below: 1=encoded_region_name(bytes),
     * 2=table_name(bytes), 3=log_sequence_number(uint64), 4=write_time(uint64),
     * 5=cluster_id(UUID message, deprecated), 6=scopes(repeated FamilyScope),
     * 7=following_kv_count(uint32), 8=cluster_ids(repeated UUID),
     * 9=nonceGroup(uint64), 10=nonce(uint64), 11=orig_sequence_number(uint64).
     */
    private WALKey(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      // Tracks which repeated-field lists have been switched to a mutable
      // ArrayList during this parse; distinct from the instance bitField0_,
      // which holds has-bits for optional/required fields only.
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          // protoc emits the default label before the later cases; label order
          // inside a Java switch is semantically irrelevant.
          switch (tag) {
            case 0:
              // Tag 0 == end of input (or end of enclosing group).
              done = true;
              break;
            default: {
              // Unknown tag: stash it in unknownFields; stop on end-group.
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              bitField0_ |= 0x00000001;
              encodedRegionName_ = input.readBytes();
              break;
            }
            case 18: {
              bitField0_ |= 0x00000002;
              tableName_ = input.readBytes();
              break;
            }
            case 24: {
              bitField0_ |= 0x00000004;
              logSequenceNumber_ = input.readUInt64();
              break;
            }
            case 32: {
              bitField0_ |= 0x00000008;
              writeTime_ = input.readUInt64();
              break;
            }
            case 42: {
              // If cluster_id appeared earlier in the stream, merge the new
              // submessage into the existing one (last-wins per field, merged
              // per submessage) as the protobuf spec requires.
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.Builder subBuilder = null;
              if (((bitField0_ & 0x00000010) == 0x00000010)) {
                subBuilder = clusterId_.toBuilder();
              }
              clusterId_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(clusterId_);
                clusterId_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000010;
              break;
            }
            case 50: {
              // First scopes element: replace the immutable empty list.
              if (!((mutable_bitField0_ & 0x00000020) == 0x00000020)) {
                scopes_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope>();
                mutable_bitField0_ |= 0x00000020;
              }
              scopes_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope.PARSER, extensionRegistry));
              break;
            }
            case 56: {
              // Note: 0x20 here is following_kv_count's has-bit; repeated
              // fields (scopes) consume no bit in the instance bitField0_.
              bitField0_ |= 0x00000020;
              followingKvCount_ = input.readUInt32();
              break;
            }
            case 66: {
              if (!((mutable_bitField0_ & 0x00000080) == 0x00000080)) {
                clusterIds_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID>();
                mutable_bitField0_ |= 0x00000080;
              }
              clusterIds_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.PARSER, extensionRegistry));
              break;
            }
            case 72: {
              bitField0_ |= 0x00000040;
              nonceGroup_ = input.readUInt64();
              break;
            }
            case 80: {
              bitField0_ |= 0x00000080;
              nonce_ = input.readUInt64();
              break;
            }
            case 88: {
              bitField0_ |= 0x00000100;
              origSequenceNumber_ = input.readUInt64();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Runs even on error so the partially-built message attached via
        // setUnfinishedMessage() is internally consistent.
        if (((mutable_bitField0_ & 0x00000020) == 0x00000020)) {
          scopes_ = java.util.Collections.unmodifiableList(scopes_);
        }
        if (((mutable_bitField0_ & 0x00000080) == 0x00000080)) {
          clusterIds_ = java.util.Collections.unmodifiableList(clusterIds_);
        }
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    /** Returns the protobuf descriptor for the WALKey message type. */
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_WALKey_descriptor;
    }
1389
    // Wires WALKey's descriptor to its reflection-based field accessors
    // (used by GeneratedMessage for getField/setField-style access).
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_WALKey_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey.class, org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey.Builder.class);
    }
1396
    // Stream parser delegating to the wire-parsing constructor. (Non-final
    // public field is protoc-2.5 generated style; do not rely on mutating it.)
    public static com.google.protobuf.Parser<WALKey> PARSER =
        new com.google.protobuf.AbstractParser<WALKey>() {
      public WALKey parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new WALKey(input, extensionRegistry);
      }
    };
1406
    /** Returns the parser for WALKey messages (the shared {@link #PARSER}). */
    @java.lang.Override
    public com.google.protobuf.Parser<WALKey> getParserForType() {
      return PARSER;
    }
1411
    // Has-bits for the singular fields below; see each hasXxx() for its mask.
    private int bitField0_;
    // required bytes encoded_region_name = 1;
    public static final int ENCODED_REGION_NAME_FIELD_NUMBER = 1;
    private com.google.protobuf.ByteString encodedRegionName_;

    /** Returns true if encoded_region_name was set (required field). */
    public boolean hasEncodedRegionName() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }

    /** Returns encoded_region_name (empty ByteString if unset). */
    public com.google.protobuf.ByteString getEncodedRegionName() {
      return encodedRegionName_;
    }
1428
1429
    // required bytes table_name = 2;
    public static final int TABLE_NAME_FIELD_NUMBER = 2;
    private com.google.protobuf.ByteString tableName_;

    /** Returns true if table_name was set (required field). */
    public boolean hasTableName() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }

    /** Returns table_name (empty ByteString if unset). */
    public com.google.protobuf.ByteString getTableName() {
      return tableName_;
    }
1444
1445
    // required uint64 log_sequence_number = 3;
    public static final int LOG_SEQUENCE_NUMBER_FIELD_NUMBER = 3;
    private long logSequenceNumber_;

    /** Returns true if log_sequence_number was set (required field). */
    public boolean hasLogSequenceNumber() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }

    /** Returns log_sequence_number (0 if unset). */
    public long getLogSequenceNumber() {
      return logSequenceNumber_;
    }
1460
1461
    // required uint64 write_time = 4;
    public static final int WRITE_TIME_FIELD_NUMBER = 4;
    private long writeTime_;

    /** Returns true if write_time was set (required field). */
    public boolean hasWriteTime() {
      return ((bitField0_ & 0x00000008) == 0x00000008);
    }

    /** Returns write_time (0 if unset). */
    public long getWriteTime() {
      return writeTime_;
    }
1476
1477
    // optional .UUID cluster_id = 5 [deprecated];
    // NOTE(review): presumably superseded by repeated cluster_ids (field 8) —
    // confirm against WAL.proto.
    public static final int CLUSTER_ID_FIELD_NUMBER = 5;
    private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID clusterId_;

    /**
     * Returns true if the deprecated singular cluster_id was set.
     *
     * @deprecated generated from a deprecated proto field.
     */
    @java.lang.Deprecated public boolean hasClusterId() {
      return ((bitField0_ & 0x00000010) == 0x00000010);
    }

    /**
     * Returns cluster_id (the UUID default instance if unset).
     *
     * @deprecated generated from a deprecated proto field.
     */
    @java.lang.Deprecated public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID getClusterId() {
      return clusterId_;
    }

    /**
     * Returns cluster_id as an OrBuilder view (the message itself here, since
     * this is the immutable side).
     *
     * @deprecated generated from a deprecated proto field.
     */
    @java.lang.Deprecated public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUIDOrBuilder getClusterIdOrBuilder() {
      return clusterId_;
    }
1522
1523
    // repeated .FamilyScope scopes = 6;
    public static final int SCOPES_FIELD_NUMBER = 6;
    private java.util.List<org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope> scopes_;

    /** Returns the scopes list (unmodifiable after parse/build). */
    public java.util.List<org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope> getScopesList() {
      return scopes_;
    }

    /** Returns the same list typed as read-only OrBuilder views. */
    public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScopeOrBuilder> 
        getScopesOrBuilderList() {
      return scopes_;
    }

    /** Returns the number of scopes elements. */
    public int getScopesCount() {
      return scopes_.size();
    }

    /** Returns the scopes element at {@code index}. */
    public org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope getScopes(int index) {
      return scopes_.get(index);
    }

    /** Returns the scopes element at {@code index} as an OrBuilder view. */
    public org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScopeOrBuilder getScopesOrBuilder(
        int index) {
      return scopes_.get(index);
    }
1558
1559
    // optional uint32 following_kv_count = 7;
    public static final int FOLLOWING_KV_COUNT_FIELD_NUMBER = 7;
    private int followingKvCount_;

    /** Returns true if following_kv_count was explicitly set. */
    public boolean hasFollowingKvCount() {
      return ((bitField0_ & 0x00000020) == 0x00000020);
    }

    /** Returns following_kv_count (0 if unset). */
    public int getFollowingKvCount() {
      return followingKvCount_;
    }
1574
1575
    // repeated .UUID cluster_ids = 8;
    public static final int CLUSTER_IDS_FIELD_NUMBER = 8;
    private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID> clusterIds_;

    /** Returns the cluster_ids list (unmodifiable after parse/build). */
    public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID> getClusterIdsList() {
      return clusterIds_;
    }

    /** Returns the same list typed as read-only OrBuilder views. */
    public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUIDOrBuilder> 
        getClusterIdsOrBuilderList() {
      return clusterIds_;
    }

    /** Returns the number of cluster_ids elements. */
    public int getClusterIdsCount() {
      return clusterIds_.size();
    }

    /** Returns the cluster_ids element at {@code index}. */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID getClusterIds(int index) {
      return clusterIds_.get(index);
    }

    /** Returns the cluster_ids element at {@code index} as an OrBuilder view. */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUIDOrBuilder getClusterIdsOrBuilder(
        int index) {
      return clusterIds_.get(index);
    }
1640
1641
    // optional uint64 nonceGroup = 9;
    public static final int NONCEGROUP_FIELD_NUMBER = 9;
    private long nonceGroup_;

    /** Returns true if nonceGroup was explicitly set. */
    public boolean hasNonceGroup() {
      return ((bitField0_ & 0x00000040) == 0x00000040);
    }

    /** Returns nonceGroup (0 if unset). */
    public long getNonceGroup() {
      return nonceGroup_;
    }
1656
1657
    // optional uint64 nonce = 10;
    public static final int NONCE_FIELD_NUMBER = 10;
    private long nonce_;

    /** Returns true if nonce was explicitly set. */
    public boolean hasNonce() {
      return ((bitField0_ & 0x00000080) == 0x00000080);
    }

    /** Returns nonce (0 if unset). */
    public long getNonce() {
      return nonce_;
    }
1672
1673
    // optional uint64 orig_sequence_number = 11;
    public static final int ORIG_SEQUENCE_NUMBER_FIELD_NUMBER = 11;
    private long origSequenceNumber_;

    /** Returns true if orig_sequence_number was explicitly set. */
    public boolean hasOrigSequenceNumber() {
      return ((bitField0_ & 0x00000100) == 0x00000100);
    }

    /** Returns orig_sequence_number (0 if unset). */
    public long getOrigSequenceNumber() {
      return origSequenceNumber_;
    }
1688
    // Resets every field to its proto default; called before parsing and when
    // building the default instance.
    private void initFields() {
      encodedRegionName_ = com.google.protobuf.ByteString.EMPTY;
      tableName_ = com.google.protobuf.ByteString.EMPTY;
      logSequenceNumber_ = 0L;
      writeTime_ = 0L;
      clusterId_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.getDefaultInstance();
      scopes_ = java.util.Collections.emptyList();
      followingKvCount_ = 0;
      clusterIds_ = java.util.Collections.emptyList();
      nonceGroup_ = 0L;
      nonce_ = 0L;
      origSequenceNumber_ = 0L;
    }
    // Memoized result of isInitialized(): -1 unknown, 0 false, 1 true.
    private byte memoizedIsInitialized = -1;
    /**
     * Returns true when all required fields (encoded_region_name, table_name,
     * log_sequence_number, write_time) are set and every nested message is
     * itself initialized. Result is cached.
     */
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      if (!hasEncodedRegionName()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasTableName()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasLogSequenceNumber()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasWriteTime()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (hasClusterId()) {
        if (!getClusterId().isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      for (int i = 0; i < getScopesCount(); i++) {
        if (!getScopes(i).isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      for (int i = 0; i < getClusterIdsCount(); i++) {
        if (!getClusterIds(i).isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }
1744
    /**
     * Serializes the set fields to {@code output} in ascending field-number
     * order, followed by any unknown fields.
     */
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      // Prime the memoized serialized size before writing (standard protoc
      // pattern so nested length-delimited writes use cached sizes).
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeBytes(1, encodedRegionName_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeBytes(2, tableName_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeUInt64(3, logSequenceNumber_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        output.writeUInt64(4, writeTime_);
      }
      if (((bitField0_ & 0x00000010) == 0x00000010)) {
        output.writeMessage(5, clusterId_);
      }
      for (int i = 0; i < scopes_.size(); i++) {
        output.writeMessage(6, scopes_.get(i));
      }
      if (((bitField0_ & 0x00000020) == 0x00000020)) {
        output.writeUInt32(7, followingKvCount_);
      }
      for (int i = 0; i < clusterIds_.size(); i++) {
        output.writeMessage(8, clusterIds_.get(i));
      }
      if (((bitField0_ & 0x00000040) == 0x00000040)) {
        output.writeUInt64(9, nonceGroup_);
      }
      if (((bitField0_ & 0x00000080) == 0x00000080)) {
        output.writeUInt64(10, nonce_);
      }
      if (((bitField0_ & 0x00000100) == 0x00000100)) {
        output.writeUInt64(11, origSequenceNumber_);
      }
      getUnknownFields().writeTo(output);
    }
1783
    // Cached wire size of this message; -1 means not yet computed. Safe to
    // memoize because the message is immutable once built.
    private int memoizedSerializedSize = -1;
    /** Returns the exact number of bytes {@link #writeTo} will emit. */
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(1, encodedRegionName_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(2, tableName_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(3, logSequenceNumber_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(4, writeTime_);
      }
      if (((bitField0_ & 0x00000010) == 0x00000010)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(5, clusterId_);
      }
      for (int i = 0; i < scopes_.size(); i++) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(6, scopes_.get(i));
      }
      if (((bitField0_ & 0x00000020) == 0x00000020)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt32Size(7, followingKvCount_);
      }
      for (int i = 0; i < clusterIds_.size(); i++) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(8, clusterIds_.get(i));
      }
      if (((bitField0_ & 0x00000040) == 0x00000040)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(9, nonceGroup_);
      }
      if (((bitField0_ & 0x00000080) == 0x00000080)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(10, nonce_);
      }
      if (((bitField0_ & 0x00000100) == 0x00000100)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(11, origSequenceNumber_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
1838
    private static final long serialVersionUID = 0L;
    // Java-serialization hook; delegates to GeneratedMessage's replacement
    // object so messages serialize via their protobuf bytes.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
1845
    /**
     * Field-by-field structural equality: each field must agree on both
     * presence (hasXxx) and value, and unknown fields must match too.
     */
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey other = (org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey) obj;

      boolean result = true;
      result = result && (hasEncodedRegionName() == other.hasEncodedRegionName());
      if (hasEncodedRegionName()) {
        result = result && getEncodedRegionName()
            .equals(other.getEncodedRegionName());
      }
      result = result && (hasTableName() == other.hasTableName());
      if (hasTableName()) {
        result = result && getTableName()
            .equals(other.getTableName());
      }
      result = result && (hasLogSequenceNumber() == other.hasLogSequenceNumber());
      if (hasLogSequenceNumber()) {
        result = result && (getLogSequenceNumber()
            == other.getLogSequenceNumber());
      }
      result = result && (hasWriteTime() == other.hasWriteTime());
      if (hasWriteTime()) {
        result = result && (getWriteTime()
            == other.getWriteTime());
      }
      result = result && (hasClusterId() == other.hasClusterId());
      if (hasClusterId()) {
        result = result && getClusterId()
            .equals(other.getClusterId());
      }
      result = result && getScopesList()
          .equals(other.getScopesList());
      result = result && (hasFollowingKvCount() == other.hasFollowingKvCount());
      if (hasFollowingKvCount()) {
        result = result && (getFollowingKvCount()
            == other.getFollowingKvCount());
      }
      result = result && getClusterIdsList()
          .equals(other.getClusterIdsList());
      result = result && (hasNonceGroup() == other.hasNonceGroup());
      if (hasNonceGroup()) {
        result = result && (getNonceGroup()
            == other.getNonceGroup());
      }
      result = result && (hasNonce() == other.hasNonce());
      if (hasNonce()) {
        result = result && (getNonce()
            == other.getNonce());
      }
      result = result && (hasOrigSequenceNumber() == other.hasOrigSequenceNumber());
      if (hasOrigSequenceNumber()) {
        result = result && (getOrigSequenceNumber()
            == other.getOrigSequenceNumber());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
1910
    // Cached hash (0 == not yet computed); valid because the message is
    // immutable. Consistent with equals(): only set fields contribute.
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasEncodedRegionName()) {
        hash = (37 * hash) + ENCODED_REGION_NAME_FIELD_NUMBER;
        hash = (53 * hash) + getEncodedRegionName().hashCode();
      }
      if (hasTableName()) {
        hash = (37 * hash) + TABLE_NAME_FIELD_NUMBER;
        hash = (53 * hash) + getTableName().hashCode();
      }
      if (hasLogSequenceNumber()) {
        hash = (37 * hash) + LOG_SEQUENCE_NUMBER_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getLogSequenceNumber());
      }
      if (hasWriteTime()) {
        hash = (37 * hash) + WRITE_TIME_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getWriteTime());
      }
      if (hasClusterId()) {
        hash = (37 * hash) + CLUSTER_ID_FIELD_NUMBER;
        hash = (53 * hash) + getClusterId().hashCode();
      }
      if (getScopesCount() > 0) {
        hash = (37 * hash) + SCOPES_FIELD_NUMBER;
        hash = (53 * hash) + getScopesList().hashCode();
      }
      if (hasFollowingKvCount()) {
        hash = (37 * hash) + FOLLOWING_KV_COUNT_FIELD_NUMBER;
        hash = (53 * hash) + getFollowingKvCount();
      }
      if (getClusterIdsCount() > 0) {
        hash = (37 * hash) + CLUSTER_IDS_FIELD_NUMBER;
        hash = (53 * hash) + getClusterIdsList().hashCode();
      }
      if (hasNonceGroup()) {
        hash = (37 * hash) + NONCEGROUP_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getNonceGroup());
      }
      if (hasNonce()) {
        hash = (37 * hash) + NONCE_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getNonce());
      }
      if (hasOrigSequenceNumber()) {
        hash = (37 * hash) + ORIG_SEQUENCE_NUMBER_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getOrigSequenceNumber());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
1967
    // Convenience parse entry points; all delegate to PARSER. The
    // parseDelimitedFrom variants read a varint length prefix first.
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
2020
    /** Creates a fresh builder with all fields at defaults. */
    public static Builder newBuilder() { return Builder.create(); }
    /** Creates a fresh builder (Message contract form of newBuilder()). */
    public Builder newBuilderForType() { return newBuilder(); }
    /** Creates a builder pre-populated from {@code prototype}'s fields. */
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    /** Returns a builder carrying this message's current field values. */
    public Builder toBuilder() { return newBuilder(this); }

    // Internal: builder attached to a parent for nested-builder change
    // notifications (used by the FieldBuilder machinery).
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
2034
2035
2036
2037
2038
2039
2040
2041
2042
2043 public static final class Builder extends
2044 com.google.protobuf.GeneratedMessage.Builder<Builder>
2045 implements org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKeyOrBuilder {
      /** Returns the WALKey message descriptor (same as WALKey.getDescriptor()). */
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_WALKey_descriptor;
      }
2050
      // Reflection accessor table, shared with the message class.
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_WALKey_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey.class, org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey.Builder.class);
      }
2057
2058
      // Construct via WALKey.newBuilder(); standalone builder.
      private Builder() {
        maybeForceBuilderInitialization();
      }

      // Builder attached to a parent for nested change propagation.
      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // Eagerly creates the nested-field builders when the runtime is
      // configured to always use field builders (descriptor-based mode).
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getClusterIdFieldBuilder();
          getScopesFieldBuilder();
          getClusterIdsFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }
2078
      /**
       * Resets every field to its default and clears all has-bits.
       * Note the Builder's bit layout differs from the message's: repeated
       * fields (scopes 0x20, clusterIds 0x80) occupy bits here, shifting
       * followingKvCount to 0x40 and nonceGroup/nonce/origSequenceNumber to
       * 0x100/0x200/0x400.
       */
      public Builder clear() {
        super.clear();
        encodedRegionName_ = com.google.protobuf.ByteString.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000001);
        tableName_ = com.google.protobuf.ByteString.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000002);
        logSequenceNumber_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000004);
        writeTime_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000008);
        if (clusterIdBuilder_ == null) {
          clusterId_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.getDefaultInstance();
        } else {
          clusterIdBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000010);
        if (scopesBuilder_ == null) {
          scopes_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000020);
        } else {
          scopesBuilder_.clear();
        }
        followingKvCount_ = 0;
        bitField0_ = (bitField0_ & ~0x00000040);
        if (clusterIdsBuilder_ == null) {
          clusterIds_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000080);
        } else {
          clusterIdsBuilder_.clear();
        }
        nonceGroup_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000100);
        nonce_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000200);
        origSequenceNumber_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000400);
        return this;
      }
2117
      /** Deep-copies this builder via a build/merge round trip. */
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      /** Returns the WALKey descriptor for this builder's message type. */
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_WALKey_descriptor;
      }

      /** Returns the shared all-defaults WALKey instance. */
      public org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey.getDefaultInstance();
      }
2130
      /**
       * Builds the message, throwing UninitializedMessageException (unchecked)
       * if any required field is missing; use buildPartial() to skip the check.
       */
      public org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey build() {
        org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
2138
      /**
       * Builds the message without the required-fields check. Remaps the
       * Builder's bit layout to the message's (repeated fields carry no
       * message has-bit): 0x40->0x20 followingKvCount, 0x100->0x40 nonceGroup,
       * 0x200->0x80 nonce, 0x400->0x100 origSequenceNumber.
       */
      public org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey result = new org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.encodedRegionName_ = encodedRegionName_;
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.tableName_ = tableName_;
        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
          to_bitField0_ |= 0x00000004;
        }
        result.logSequenceNumber_ = logSequenceNumber_;
        if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
          to_bitField0_ |= 0x00000008;
        }
        result.writeTime_ = writeTime_;
        if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
          to_bitField0_ |= 0x00000010;
        }
        if (clusterIdBuilder_ == null) {
          result.clusterId_ = clusterId_;
        } else {
          result.clusterId_ = clusterIdBuilder_.build();
        }
        // Repeated fields: freeze the local list (then reuse it for future
        // merges via copy-on-write) or delegate to the field builder.
        if (scopesBuilder_ == null) {
          if (((bitField0_ & 0x00000020) == 0x00000020)) {
            scopes_ = java.util.Collections.unmodifiableList(scopes_);
            bitField0_ = (bitField0_ & ~0x00000020);
          }
          result.scopes_ = scopes_;
        } else {
          result.scopes_ = scopesBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000040) == 0x00000040)) {
          to_bitField0_ |= 0x00000020;
        }
        result.followingKvCount_ = followingKvCount_;
        if (clusterIdsBuilder_ == null) {
          if (((bitField0_ & 0x00000080) == 0x00000080)) {
            clusterIds_ = java.util.Collections.unmodifiableList(clusterIds_);
            bitField0_ = (bitField0_ & ~0x00000080);
          }
          result.clusterIds_ = clusterIds_;
        } else {
          result.clusterIds_ = clusterIdsBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000100) == 0x00000100)) {
          to_bitField0_ |= 0x00000040;
        }
        result.nonceGroup_ = nonceGroup_;
        if (((from_bitField0_ & 0x00000200) == 0x00000200)) {
          to_bitField0_ |= 0x00000080;
        }
        result.nonce_ = nonce_;
        if (((from_bitField0_ & 0x00000400) == 0x00000400)) {
          to_bitField0_ |= 0x00000100;
        }
        result.origSequenceNumber_ = origSequenceNumber_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
2205
      // Typed fast path for WALKey; otherwise fall back to the reflection-based
      // merge in GeneratedMessage.Builder.
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
2214
      /**
       * Merges {@code other}'s set fields into this builder: singular fields
       * overwrite (cluster_id merges as a submessage), repeated fields append.
       * Merging the default instance is a no-op.
       */
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey.getDefaultInstance()) return this;
        if (other.hasEncodedRegionName()) {
          setEncodedRegionName(other.getEncodedRegionName());
        }
        if (other.hasTableName()) {
          setTableName(other.getTableName());
        }
        if (other.hasLogSequenceNumber()) {
          setLogSequenceNumber(other.getLogSequenceNumber());
        }
        if (other.hasWriteTime()) {
          setWriteTime(other.getWriteTime());
        }
        if (other.hasClusterId()) {
          mergeClusterId(other.getClusterId());
        }
        // scopes: if our list is empty we can alias other's immutable list
        // (copy-on-write via ensureScopesIsMutable on later mutation);
        // otherwise copy-append.
        if (scopesBuilder_ == null) {
          if (!other.scopes_.isEmpty()) {
            if (scopes_.isEmpty()) {
              scopes_ = other.scopes_;
              bitField0_ = (bitField0_ & ~0x00000020);
            } else {
              ensureScopesIsMutable();
              scopes_.addAll(other.scopes_);
            }
            onChanged();
          }
        } else {
          if (!other.scopes_.isEmpty()) {
            if (scopesBuilder_.isEmpty()) {
              // Swap the empty field builder for a direct list alias.
              scopesBuilder_.dispose();
              scopesBuilder_ = null;
              scopes_ = other.scopes_;
              bitField0_ = (bitField0_ & ~0x00000020);
              scopesBuilder_ = 
                com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
                   getScopesFieldBuilder() : null;
            } else {
              scopesBuilder_.addAllMessages(other.scopes_);
            }
          }
        }
        if (other.hasFollowingKvCount()) {
          setFollowingKvCount(other.getFollowingKvCount());
        }
        // cluster_ids: same aliasing strategy as scopes above.
        if (clusterIdsBuilder_ == null) {
          if (!other.clusterIds_.isEmpty()) {
            if (clusterIds_.isEmpty()) {
              clusterIds_ = other.clusterIds_;
              bitField0_ = (bitField0_ & ~0x00000080);
            } else {
              ensureClusterIdsIsMutable();
              clusterIds_.addAll(other.clusterIds_);
            }
            onChanged();
          }
        } else {
          if (!other.clusterIds_.isEmpty()) {
            if (clusterIdsBuilder_.isEmpty()) {
              clusterIdsBuilder_.dispose();
              clusterIdsBuilder_ = null;
              clusterIds_ = other.clusterIds_;
              bitField0_ = (bitField0_ & ~0x00000080);
              clusterIdsBuilder_ = 
                com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
                   getClusterIdsFieldBuilder() : null;
            } else {
              clusterIdsBuilder_.addAllMessages(other.clusterIds_);
            }
          }
        }
        if (other.hasNonceGroup()) {
          setNonceGroup(other.getNonceGroup());
        }
        if (other.hasNonce()) {
          setNonce(other.getNonce());
        }
        if (other.hasOrigSequenceNumber()) {
          setOrigSequenceNumber(other.getOrigSequenceNumber());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
2299
      /**
       * Mirrors WALKey.isInitialized(): all four required fields set and every
       * nested message initialized. Not memoized — builder state is mutable.
       */
      public final boolean isInitialized() {
        if (!hasEncodedRegionName()) {
          
          return false;
        }
        if (!hasTableName()) {
          
          return false;
        }
        if (!hasLogSequenceNumber()) {
          
          return false;
        }
        if (!hasWriteTime()) {
          
          return false;
        }
        if (hasClusterId()) {
          if (!getClusterId().isInitialized()) {
            
            return false;
          }
        }
        for (int i = 0; i < getScopesCount(); i++) {
          if (!getScopes(i).isInitialized()) {
            
            return false;
          }
        }
        for (int i = 0; i < getClusterIdsCount(); i++) {
          if (!getClusterIds(i).isInitialized()) {
            
            return false;
          }
        }
        return true;
      }
2337
      /**
       * Parses a WALKey from {@code input} and merges it into this builder.
       * Per the protobuf Builder contract, on a parse failure the partially
       * decoded message (if any) is still merged in the finally block before
       * the original exception is rethrown.
       */
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          // Capture whatever was decoded before the failure so the finally
          // block can merge it, then propagate the exception unchanged.
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
2355 private int bitField0_;
2356
2357
2358 private com.google.protobuf.ByteString encodedRegionName_ = com.google.protobuf.ByteString.EMPTY;
2359
2360
2361
2362 public boolean hasEncodedRegionName() {
2363 return ((bitField0_ & 0x00000001) == 0x00000001);
2364 }
2365
2366
2367
2368 public com.google.protobuf.ByteString getEncodedRegionName() {
2369 return encodedRegionName_;
2370 }
2371
2372
2373
2374 public Builder setEncodedRegionName(com.google.protobuf.ByteString value) {
2375 if (value == null) {
2376 throw new NullPointerException();
2377 }
2378 bitField0_ |= 0x00000001;
2379 encodedRegionName_ = value;
2380 onChanged();
2381 return this;
2382 }
2383
2384
2385
2386 public Builder clearEncodedRegionName() {
2387 bitField0_ = (bitField0_ & ~0x00000001);
2388 encodedRegionName_ = getDefaultInstance().getEncodedRegionName();
2389 onChanged();
2390 return this;
2391 }
2392
2393
2394 private com.google.protobuf.ByteString tableName_ = com.google.protobuf.ByteString.EMPTY;
2395
2396
2397
2398 public boolean hasTableName() {
2399 return ((bitField0_ & 0x00000002) == 0x00000002);
2400 }
2401
2402
2403
2404 public com.google.protobuf.ByteString getTableName() {
2405 return tableName_;
2406 }
2407
2408
2409
2410 public Builder setTableName(com.google.protobuf.ByteString value) {
2411 if (value == null) {
2412 throw new NullPointerException();
2413 }
2414 bitField0_ |= 0x00000002;
2415 tableName_ = value;
2416 onChanged();
2417 return this;
2418 }
2419
2420
2421
2422 public Builder clearTableName() {
2423 bitField0_ = (bitField0_ & ~0x00000002);
2424 tableName_ = getDefaultInstance().getTableName();
2425 onChanged();
2426 return this;
2427 }
2428
2429
2430 private long logSequenceNumber_ ;
2431
2432
2433
2434 public boolean hasLogSequenceNumber() {
2435 return ((bitField0_ & 0x00000004) == 0x00000004);
2436 }
2437
2438
2439
2440 public long getLogSequenceNumber() {
2441 return logSequenceNumber_;
2442 }
2443
2444
2445
2446 public Builder setLogSequenceNumber(long value) {
2447 bitField0_ |= 0x00000004;
2448 logSequenceNumber_ = value;
2449 onChanged();
2450 return this;
2451 }
2452
2453
2454
2455 public Builder clearLogSequenceNumber() {
2456 bitField0_ = (bitField0_ & ~0x00000004);
2457 logSequenceNumber_ = 0L;
2458 onChanged();
2459 return this;
2460 }
2461
2462
2463 private long writeTime_ ;
2464
2465
2466
2467 public boolean hasWriteTime() {
2468 return ((bitField0_ & 0x00000008) == 0x00000008);
2469 }
2470
2471
2472
2473 public long getWriteTime() {
2474 return writeTime_;
2475 }
2476
2477
2478
2479 public Builder setWriteTime(long value) {
2480 bitField0_ |= 0x00000008;
2481 writeTime_ = value;
2482 onChanged();
2483 return this;
2484 }
2485
2486
2487
2488 public Builder clearWriteTime() {
2489 bitField0_ = (bitField0_ & ~0x00000008);
2490 writeTime_ = 0L;
2491 onChanged();
2492 return this;
2493 }
2494
2495
      // optional .UUID cluster_id = 5 [deprecated = true];
      // Exactly one of clusterId_ / clusterIdBuilder_ is live at any time:
      // the plain message field is used until a nested builder is requested,
      // after which the SingleFieldBuilder becomes the source of truth.
      private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID clusterId_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUIDOrBuilder> clusterIdBuilder_;

      /** @return whether the deprecated cluster_id field has been set. */
      @java.lang.Deprecated public boolean hasClusterId() {
        return ((bitField0_ & 0x00000010) == 0x00000010);
      }

      /** @return the current cluster_id (default instance when unset). */
      @java.lang.Deprecated public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID getClusterId() {
        if (clusterIdBuilder_ == null) {
          return clusterId_;
        } else {
          return clusterIdBuilder_.getMessage();
        }
      }

      /**
       * Sets cluster_id.
       * @throws NullPointerException if {@code value} is null
       */
      @java.lang.Deprecated public Builder setClusterId(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID value) {
        if (clusterIdBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          clusterId_ = value;
          onChanged();
        } else {
          clusterIdBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000010;
        return this;
      }

      /** Sets cluster_id from a builder (built eagerly here). */
      @java.lang.Deprecated public Builder setClusterId(
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.Builder builderForValue) {
        if (clusterIdBuilder_ == null) {
          clusterId_ = builderForValue.build();
          onChanged();
        } else {
          clusterIdBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000010;
        return this;
      }

      /**
       * Merges {@code value} into the existing cluster_id when one is already
       * set (and is not the shared default instance); otherwise replaces it
       * outright.
       */
      @java.lang.Deprecated public Builder mergeClusterId(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID value) {
        if (clusterIdBuilder_ == null) {
          if (((bitField0_ & 0x00000010) == 0x00000010) &&
              clusterId_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.getDefaultInstance()) {
            clusterId_ =
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.newBuilder(clusterId_).mergeFrom(value).buildPartial();
          } else {
            clusterId_ = value;
          }
          onChanged();
        } else {
          clusterIdBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000010;
        return this;
      }

      /** Clears cluster_id back to the default instance. */
      @java.lang.Deprecated public Builder clearClusterId() {
        if (clusterIdBuilder_ == null) {
          clusterId_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.getDefaultInstance();
          onChanged();
        } else {
          clusterIdBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000010);
        return this;
      }

      /** Forces creation of the nested builder and marks the field set. */
      @java.lang.Deprecated public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.Builder getClusterIdBuilder() {
        bitField0_ |= 0x00000010;
        onChanged();
        return getClusterIdFieldBuilder().getBuilder();
      }

      /** @return a read-only view without forcing nested-builder creation. */
      @java.lang.Deprecated public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUIDOrBuilder getClusterIdOrBuilder() {
        if (clusterIdBuilder_ != null) {
          return clusterIdBuilder_.getMessageOrBuilder();
        } else {
          return clusterId_;
        }
      }

      // Lazily creates the SingleFieldBuilder; clusterId_ is nulled out
      // afterwards so the builder is the single source of truth.
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUIDOrBuilder>
          getClusterIdFieldBuilder() {
        if (clusterIdBuilder_ == null) {
          clusterIdBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUIDOrBuilder>(
                  clusterId_,
                  getParentForChildren(),
                  isClean());
          clusterId_ = null;
        }
        return clusterIdBuilder_;
      }
2683
2684
      // repeated .FamilyScope scopes = 6;
      // Backing list while no RepeatedFieldBuilder exists; starts as the
      // shared immutable empty list and is copied on first mutation.
      private java.util.List<org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope> scopes_ =
        java.util.Collections.emptyList();
      // Copy-on-write guard: bit 0x20 records that scopes_ is already a
      // private mutable ArrayList rather than a shared/immutable list.
      private void ensureScopesIsMutable() {
        if (!((bitField0_ & 0x00000020) == 0x00000020)) {
          scopes_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope>(scopes_);
          bitField0_ |= 0x00000020;
        }
      }

      // Once non-null, this builder owns the repeated field's contents.
      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope, org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope.Builder, org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScopeOrBuilder> scopesBuilder_;

      /** @return an unmodifiable view of the scopes list. */
      public java.util.List<org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope> getScopesList() {
        if (scopesBuilder_ == null) {
          return java.util.Collections.unmodifiableList(scopes_);
        } else {
          return scopesBuilder_.getMessageList();
        }
      }

      /** @return the number of scopes elements. */
      public int getScopesCount() {
        if (scopesBuilder_ == null) {
          return scopes_.size();
        } else {
          return scopesBuilder_.getCount();
        }
      }

      /** @return the scopes element at {@code index}. */
      public org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope getScopes(int index) {
        if (scopesBuilder_ == null) {
          return scopes_.get(index);
        } else {
          return scopesBuilder_.getMessage(index);
        }
      }

      /** Replaces the element at {@code index}; rejects null. */
      public Builder setScopes(
          int index, org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope value) {
        if (scopesBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureScopesIsMutable();
          scopes_.set(index, value);
          onChanged();
        } else {
          scopesBuilder_.setMessage(index, value);
        }
        return this;
      }

      /** Replaces the element at {@code index} with a built builder value. */
      public Builder setScopes(
          int index, org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope.Builder builderForValue) {
        if (scopesBuilder_ == null) {
          ensureScopesIsMutable();
          scopes_.set(index, builderForValue.build());
          onChanged();
        } else {
          scopesBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }

      /** Appends an element; rejects null. */
      public Builder addScopes(org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope value) {
        if (scopesBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureScopesIsMutable();
          scopes_.add(value);
          onChanged();
        } else {
          scopesBuilder_.addMessage(value);
        }
        return this;
      }

      /** Inserts an element at {@code index}; rejects null. */
      public Builder addScopes(
          int index, org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope value) {
        if (scopesBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureScopesIsMutable();
          scopes_.add(index, value);
          onChanged();
        } else {
          scopesBuilder_.addMessage(index, value);
        }
        return this;
      }

      /** Appends an element built from {@code builderForValue}. */
      public Builder addScopes(
          org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope.Builder builderForValue) {
        if (scopesBuilder_ == null) {
          ensureScopesIsMutable();
          scopes_.add(builderForValue.build());
          onChanged();
        } else {
          scopesBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }

      /** Inserts an element built from {@code builderForValue} at {@code index}. */
      public Builder addScopes(
          int index, org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope.Builder builderForValue) {
        if (scopesBuilder_ == null) {
          ensureScopesIsMutable();
          scopes_.add(index, builderForValue.build());
          onChanged();
        } else {
          scopesBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }

      /** Appends every element of {@code values}. */
      public Builder addAllScopes(
          java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope> values) {
        if (scopesBuilder_ == null) {
          ensureScopesIsMutable();
          // AbstractMessageLite.Builder.addAll: bulk-append with null checks.
          super.addAll(values, scopes_);
          onChanged();
        } else {
          scopesBuilder_.addAllMessages(values);
        }
        return this;
      }

      /** Removes all scopes elements. */
      public Builder clearScopes() {
        if (scopesBuilder_ == null) {
          scopes_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000020);
          onChanged();
        } else {
          scopesBuilder_.clear();
        }
        return this;
      }

      /** Removes the element at {@code index}. */
      public Builder removeScopes(int index) {
        if (scopesBuilder_ == null) {
          ensureScopesIsMutable();
          scopes_.remove(index);
          onChanged();
        } else {
          scopesBuilder_.remove(index);
        }
        return this;
      }

      /** @return a mutable builder for the element at {@code index}. */
      public org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope.Builder getScopesBuilder(
          int index) {
        return getScopesFieldBuilder().getBuilder(index);
      }

      /** @return a read view of the element at {@code index}. */
      public org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScopeOrBuilder getScopesOrBuilder(
          int index) {
        if (scopesBuilder_ == null) {
          return scopes_.get(index); } else {
          return scopesBuilder_.getMessageOrBuilder(index);
        }
      }

      /** @return read views of all elements. */
      public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScopeOrBuilder>
           getScopesOrBuilderList() {
        if (scopesBuilder_ != null) {
          return scopesBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(scopes_);
        }
      }

      /** Appends a default element and returns its builder. */
      public org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope.Builder addScopesBuilder() {
        return getScopesFieldBuilder().addBuilder(
            org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope.getDefaultInstance());
      }

      /** Inserts a default element at {@code index} and returns its builder. */
      public org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope.Builder addScopesBuilder(
          int index) {
        return getScopesFieldBuilder().addBuilder(
            index, org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope.getDefaultInstance());
      }

      /** @return builders for every element. */
      public java.util.List<org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope.Builder>
           getScopesBuilderList() {
        return getScopesFieldBuilder().getBuilderList();
      }
      // Lazily creates the RepeatedFieldBuilder; scopes_ is nulled out
      // afterwards so the builder is the single source of truth.
      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope, org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope.Builder, org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScopeOrBuilder>
          getScopesFieldBuilder() {
        if (scopesBuilder_ == null) {
          scopesBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope, org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope.Builder, org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScopeOrBuilder>(
                  scopes_,
                  ((bitField0_ & 0x00000020) == 0x00000020),
                  getParentForChildren(),
                  isClean());
          scopes_ = null;
        }
        return scopesBuilder_;
      }
2923
2924
2925 private int followingKvCount_ ;
2926
2927
2928
2929 public boolean hasFollowingKvCount() {
2930 return ((bitField0_ & 0x00000040) == 0x00000040);
2931 }
2932
2933
2934
2935 public int getFollowingKvCount() {
2936 return followingKvCount_;
2937 }
2938
2939
2940
2941 public Builder setFollowingKvCount(int value) {
2942 bitField0_ |= 0x00000040;
2943 followingKvCount_ = value;
2944 onChanged();
2945 return this;
2946 }
2947
2948
2949
2950 public Builder clearFollowingKvCount() {
2951 bitField0_ = (bitField0_ & ~0x00000040);
2952 followingKvCount_ = 0;
2953 onChanged();
2954 return this;
2955 }
2956
2957
      // repeated .UUID cluster_ids = 8;
      // Backing list while no RepeatedFieldBuilder exists; starts as the
      // shared immutable empty list and is copied on first mutation.
      private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID> clusterIds_ =
        java.util.Collections.emptyList();
      // Copy-on-write guard: bit 0x80 records that clusterIds_ is already a
      // private mutable ArrayList rather than a shared/immutable list.
      private void ensureClusterIdsIsMutable() {
        if (!((bitField0_ & 0x00000080) == 0x00000080)) {
          clusterIds_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID>(clusterIds_);
          bitField0_ |= 0x00000080;
        }
      }

      // Once non-null, this builder owns the repeated field's contents.
      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUIDOrBuilder> clusterIdsBuilder_;

      /** @return an unmodifiable view of the cluster_ids list. */
      public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID> getClusterIdsList() {
        if (clusterIdsBuilder_ == null) {
          return java.util.Collections.unmodifiableList(clusterIds_);
        } else {
          return clusterIdsBuilder_.getMessageList();
        }
      }

      /** @return the number of cluster_ids elements. */
      public int getClusterIdsCount() {
        if (clusterIdsBuilder_ == null) {
          return clusterIds_.size();
        } else {
          return clusterIdsBuilder_.getCount();
        }
      }

      /** @return the cluster_ids element at {@code index}. */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID getClusterIds(int index) {
        if (clusterIdsBuilder_ == null) {
          return clusterIds_.get(index);
        } else {
          return clusterIdsBuilder_.getMessage(index);
        }
      }

      /** Replaces the element at {@code index}; rejects null. */
      public Builder setClusterIds(
          int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID value) {
        if (clusterIdsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureClusterIdsIsMutable();
          clusterIds_.set(index, value);
          onChanged();
        } else {
          clusterIdsBuilder_.setMessage(index, value);
        }
        return this;
      }

      /** Replaces the element at {@code index} with a built builder value. */
      public Builder setClusterIds(
          int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.Builder builderForValue) {
        if (clusterIdsBuilder_ == null) {
          ensureClusterIdsIsMutable();
          clusterIds_.set(index, builderForValue.build());
          onChanged();
        } else {
          clusterIdsBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }

      /** Appends an element; rejects null. */
      public Builder addClusterIds(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID value) {
        if (clusterIdsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureClusterIdsIsMutable();
          clusterIds_.add(value);
          onChanged();
        } else {
          clusterIdsBuilder_.addMessage(value);
        }
        return this;
      }

      /** Inserts an element at {@code index}; rejects null. */
      public Builder addClusterIds(
          int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID value) {
        if (clusterIdsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureClusterIdsIsMutable();
          clusterIds_.add(index, value);
          onChanged();
        } else {
          clusterIdsBuilder_.addMessage(index, value);
        }
        return this;
      }

      /** Appends an element built from {@code builderForValue}. */
      public Builder addClusterIds(
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.Builder builderForValue) {
        if (clusterIdsBuilder_ == null) {
          ensureClusterIdsIsMutable();
          clusterIds_.add(builderForValue.build());
          onChanged();
        } else {
          clusterIdsBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }

      /** Inserts an element built from {@code builderForValue} at {@code index}. */
      public Builder addClusterIds(
          int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.Builder builderForValue) {
        if (clusterIdsBuilder_ == null) {
          ensureClusterIdsIsMutable();
          clusterIds_.add(index, builderForValue.build());
          onChanged();
        } else {
          clusterIdsBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }

      /** Appends every element of {@code values}. */
      public Builder addAllClusterIds(
          java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID> values) {
        if (clusterIdsBuilder_ == null) {
          ensureClusterIdsIsMutable();
          // AbstractMessageLite.Builder.addAll: bulk-append with null checks.
          super.addAll(values, clusterIds_);
          onChanged();
        } else {
          clusterIdsBuilder_.addAllMessages(values);
        }
        return this;
      }

      /** Removes all cluster_ids elements. */
      public Builder clearClusterIds() {
        if (clusterIdsBuilder_ == null) {
          clusterIds_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000080);
          onChanged();
        } else {
          clusterIdsBuilder_.clear();
        }
        return this;
      }

      /** Removes the element at {@code index}. */
      public Builder removeClusterIds(int index) {
        if (clusterIdsBuilder_ == null) {
          ensureClusterIdsIsMutable();
          clusterIds_.remove(index);
          onChanged();
        } else {
          clusterIdsBuilder_.remove(index);
        }
        return this;
      }

      /** @return a mutable builder for the element at {@code index}. */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.Builder getClusterIdsBuilder(
          int index) {
        return getClusterIdsFieldBuilder().getBuilder(index);
      }

      /** @return a read view of the element at {@code index}. */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUIDOrBuilder getClusterIdsOrBuilder(
          int index) {
        if (clusterIdsBuilder_ == null) {
          return clusterIds_.get(index); } else {
          return clusterIdsBuilder_.getMessageOrBuilder(index);
        }
      }

      /** @return read views of all elements. */
      public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUIDOrBuilder>
           getClusterIdsOrBuilderList() {
        if (clusterIdsBuilder_ != null) {
          return clusterIdsBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(clusterIds_);
        }
      }

      /** Appends a default element and returns its builder. */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.Builder addClusterIdsBuilder() {
        return getClusterIdsFieldBuilder().addBuilder(
            org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.getDefaultInstance());
      }

      /** Inserts a default element at {@code index} and returns its builder. */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.Builder addClusterIdsBuilder(
          int index) {
        return getClusterIdsFieldBuilder().addBuilder(
            index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.getDefaultInstance());
      }

      /** @return builders for every element. */
      public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.Builder>
           getClusterIdsBuilderList() {
        return getClusterIdsFieldBuilder().getBuilderList();
      }
      // Lazily creates the RepeatedFieldBuilder; clusterIds_ is nulled out
      // afterwards so the builder is the single source of truth.
      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUIDOrBuilder>
          getClusterIdsFieldBuilder() {
        if (clusterIdsBuilder_ == null) {
          clusterIdsBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUIDOrBuilder>(
                  clusterIds_,
                  ((bitField0_ & 0x00000080) == 0x00000080),
                  getParentForChildren(),
                  isClean());
          clusterIds_ = null;
        }
        return clusterIdsBuilder_;
      }
3304
3305
3306 private long nonceGroup_ ;
3307
3308
3309
3310 public boolean hasNonceGroup() {
3311 return ((bitField0_ & 0x00000100) == 0x00000100);
3312 }
3313
3314
3315
3316 public long getNonceGroup() {
3317 return nonceGroup_;
3318 }
3319
3320
3321
3322 public Builder setNonceGroup(long value) {
3323 bitField0_ |= 0x00000100;
3324 nonceGroup_ = value;
3325 onChanged();
3326 return this;
3327 }
3328
3329
3330
3331 public Builder clearNonceGroup() {
3332 bitField0_ = (bitField0_ & ~0x00000100);
3333 nonceGroup_ = 0L;
3334 onChanged();
3335 return this;
3336 }
3337
3338
3339 private long nonce_ ;
3340
3341
3342
3343 public boolean hasNonce() {
3344 return ((bitField0_ & 0x00000200) == 0x00000200);
3345 }
3346
3347
3348
3349 public long getNonce() {
3350 return nonce_;
3351 }
3352
3353
3354
3355 public Builder setNonce(long value) {
3356 bitField0_ |= 0x00000200;
3357 nonce_ = value;
3358 onChanged();
3359 return this;
3360 }
3361
3362
3363
3364 public Builder clearNonce() {
3365 bitField0_ = (bitField0_ & ~0x00000200);
3366 nonce_ = 0L;
3367 onChanged();
3368 return this;
3369 }
3370
3371
3372 private long origSequenceNumber_ ;
3373
3374
3375
3376 public boolean hasOrigSequenceNumber() {
3377 return ((bitField0_ & 0x00000400) == 0x00000400);
3378 }
3379
3380
3381
3382 public long getOrigSequenceNumber() {
3383 return origSequenceNumber_;
3384 }
3385
3386
3387
3388 public Builder setOrigSequenceNumber(long value) {
3389 bitField0_ |= 0x00000400;
3390 origSequenceNumber_ = value;
3391 onChanged();
3392 return this;
3393 }
3394
3395
3396
3397 public Builder clearOrigSequenceNumber() {
3398 bitField0_ = (bitField0_ & ~0x00000400);
3399 origSequenceNumber_ = 0L;
3400 onChanged();
3401 return this;
3402 }
3403
3404
3405 }
3406
    static {
      // Eagerly create and initialize the singleton default WALKey instance
      // returned by getDefaultInstance().
      defaultInstance = new WALKey(true);
      defaultInstance.initFields();
    }
3411
3412
3413 }
3414
  /**
   * Read-only accessor interface shared by {@code FamilyScope} and its
   * builder.
   */
  public interface FamilyScopeOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required bytes family = 1;
    /** @return whether the required family field is set. */
    boolean hasFamily();
    /** @return the family bytes. */
    com.google.protobuf.ByteString getFamily();

    // required .ScopeType scope_type = 2;
    /** @return whether the required scope_type field is set. */
    boolean hasScopeType();
    /** @return the replication scope type for this family. */
    org.apache.hadoop.hbase.protobuf.generated.WALProtos.ScopeType getScopeType();
  }
3438
3439
3440
3441 public static final class FamilyScope extends
3442 com.google.protobuf.GeneratedMessage
3443 implements FamilyScopeOrBuilder {
3444
    /** Constructs a FamilyScope from a builder, capturing its unknown fields. */
    private FamilyScope(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    /** Constructor used only to create the singleton default instance. */
    private FamilyScope(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
3450
    // Singleton default instance, assigned in the class's static initializer.
    private static final FamilyScope defaultInstance;
    /** @return the shared immutable default (all-fields-unset) FamilyScope. */
    public static FamilyScope getDefaultInstance() {
      return defaultInstance;
    }

    /** @return the default instance for this message type. */
    public FamilyScope getDefaultInstanceForType() {
      return defaultInstance;
    }
3459
    // Fields read from the wire that this generated class does not know about.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    /** @return unrecognized fields preserved from parsing. */
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    /**
     * Parses a FamilyScope directly from a coded stream, collecting any
     * unrecognized fields into {@code unknownFields}.  Unrecognized enum
     * numbers for scope_type are preserved as varint unknown fields rather
     * than dropped.
     */
    private FamilyScope(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              // Tag 0 marks end of input.
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              // Field 1, wire type 2 (length-delimited): family.
              bitField0_ |= 0x00000001;
              family_ = input.readBytes();
              break;
            }
            case 16: {
              // Field 2, wire type 0 (varint): scope_type.
              int rawValue = input.readEnum();
              org.apache.hadoop.hbase.protobuf.generated.WALProtos.ScopeType value = org.apache.hadoop.hbase.protobuf.generated.WALProtos.ScopeType.valueOf(rawValue);
              if (value == null) {
                // Unknown enum number: keep it round-trippable.
                unknownFields.mergeVarintField(2, rawValue);
              } else {
                bitField0_ |= 0x00000002;
                scopeType_ = value;
              }
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Always freeze what was read, even on failure.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    /** @return the protobuf descriptor for the FamilyScope message type. */
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_FamilyScope_descriptor;
    }

    /** Hooks this class into the reflection-based field accessor machinery. */
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_FamilyScope_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope.class, org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope.Builder.class);
    }
3528
    /** Stateless parser that delegates to the stream-reading constructor. */
    public static com.google.protobuf.Parser<FamilyScope> PARSER =
        new com.google.protobuf.AbstractParser<FamilyScope>() {
      public FamilyScope parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new FamilyScope(input, extensionRegistry);
      }
    };

    /** @return the parser singleton for FamilyScope. */
    @java.lang.Override
    public com.google.protobuf.Parser<FamilyScope> getParserForType() {
      return PARSER;
    }
3543
    // Presence bits for this message's fields (one bit per field).
    private int bitField0_;

    // required bytes family = 1;
    public static final int FAMILY_FIELD_NUMBER = 1;
    private com.google.protobuf.ByteString family_;
    /** @return whether family was present on the wire. */
    public boolean hasFamily() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /** @return the family bytes (empty when unset). */
    public com.google.protobuf.ByteString getFamily() {
      return family_;
    }

    // required .ScopeType scope_type = 2;
    public static final int SCOPE_TYPE_FIELD_NUMBER = 2;
    private org.apache.hadoop.hbase.protobuf.generated.WALProtos.ScopeType scopeType_;
    /** @return whether scope_type was present on the wire. */
    public boolean hasScopeType() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /** @return the scope type (REPLICATION_SCOPE_LOCAL when unset). */
    public org.apache.hadoop.hbase.protobuf.generated.WALProtos.ScopeType getScopeType() {
      return scopeType_;
    }
3576
    /** Resets all fields to their proto-declared defaults. */
    private void initFields() {
      family_ = com.google.protobuf.ByteString.EMPTY;
      scopeType_ = org.apache.hadoop.hbase.protobuf.generated.WALProtos.ScopeType.REPLICATION_SCOPE_LOCAL;
    }
3581 private byte memoizedIsInitialized = -1;
3582 public final boolean isInitialized() {
3583 byte isInitialized = memoizedIsInitialized;
3584 if (isInitialized != -1) return isInitialized == 1;
3585
3586 if (!hasFamily()) {
3587 memoizedIsInitialized = 0;
3588 return false;
3589 }
3590 if (!hasScopeType()) {
3591 memoizedIsInitialized = 0;
3592 return false;
3593 }
3594 memoizedIsInitialized = 1;
3595 return true;
3596 }
3597
    /**
     * Serializes the set fields (tags 1 and 2) followed by any unknown
     * fields.  getSerializedSize() is invoked first for its memoizing side
     * effect, as required by the protobuf runtime.
     */
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeBytes(1, family_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeEnum(2, scopeType_.getNumber());
      }
      getUnknownFields().writeTo(output);
    }
3609
    // Cached wire size; -1 means not yet computed.
    private int memoizedSerializedSize = -1;
    /** @return the exact serialized byte size, computed once and memoized. */
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(1, family_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeEnumSize(2, scopeType_.getNumber());
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
3628
    private static final long serialVersionUID = 0L;
    // Java-serialization hook; delegates to the superclass replacement object.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
3635
3636 @java.lang.Override
3637 public boolean equals(final java.lang.Object obj) {
3638 if (obj == this) {
3639 return true;
3640 }
3641 if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope)) {
3642 return super.equals(obj);
3643 }
3644 org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope other = (org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope) obj;
3645
3646 boolean result = true;
3647 result = result && (hasFamily() == other.hasFamily());
3648 if (hasFamily()) {
3649 result = result && getFamily()
3650 .equals(other.getFamily());
3651 }
3652 result = result && (hasScopeType() == other.hasScopeType());
3653 if (hasScopeType()) {
3654 result = result &&
3655 (getScopeType() == other.getScopeType());
3656 }
3657 result = result &&
3658 getUnknownFields().equals(other.getUnknownFields());
3659 return result;
3660 }
3661
    // Memoized hash: 0 = not yet computed (a computed hash of 0 is recomputed,
    // which is harmless since the result is deterministic).
    private int memoizedHashCode = 0;

    /**
     * Hash consistent with equals(): folds in the descriptor, each present
     * field (tagged by its field number), and the unknown-field set.
     */
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasFamily()) {
        hash = (37 * hash) + FAMILY_FIELD_NUMBER;
        hash = (53 * hash) + getFamily().hashCode();
      }
      if (hasScopeType()) {
        hash = (37 * hash) + SCOPE_TYPE_FIELD_NUMBER;
        // hashEnum hashes the enum's wire number rather than its identity.
        hash = (53 * hash) + hashEnum(getScopeType());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
3682
    // Static parse helpers — all delegate to PARSER. The ByteString/byte[]
    // overloads throw InvalidProtocolBufferException on malformed input; the
    // stream overloads throw IOException. parseDelimitedFrom expects a varint
    // length prefix before the message bytes (see protobuf docs).
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
3735
    /** Fresh builder with all fields at their defaults. */
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    /** Builder pre-populated with {@code prototype}'s fields. */
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    /** Builder pre-populated with this message's fields. */
    public Builder toBuilder() { return newBuilder(this); }

    // Internal factory used by parent builders for nested-builder support.
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
3749
3750
3751
    /**
     * Mutable builder for {@code FamilyScope} messages. Field presence is
     * tracked in {@code bitField0_} (bit 0 = family, bit 1 = scope_type),
     * mirroring the message class.
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScopeOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_FamilyScope_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_FamilyScope_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope.class, org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope.Builder.class);
      }

      // Construct via Builder.create() or FamilyScope.newBuilder().
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // No message/group fields here, so nothing to eagerly initialize.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }

      /** Resets both fields to their defaults and clears their presence bits. */
      public Builder clear() {
        super.clear();
        family_ = com.google.protobuf.ByteString.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000001);
        scopeType_ = org.apache.hadoop.hbase.protobuf.generated.WALProtos.ScopeType.REPLICATION_SCOPE_LOCAL;
        bitField0_ = (bitField0_ & ~0x00000002);
        return this;
      }

      // Deep copy: round-trips through a partially-built message.
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_FamilyScope_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope.getDefaultInstance();
      }

      /**
       * Builds the message, throwing if either required field is missing.
       * Use buildPartial() to skip the required-field check.
       */
      public org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope build() {
        org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      // Copies field values and presence bits into a new message without
      // validating required fields.
      public org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope result = new org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.family_ = family_;
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.scopeType_ = scopeType_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      // Generic merge: dispatches to the typed overload when possible.
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      /** Overwrites this builder's fields with any fields set on {@code other}. */
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope.getDefaultInstance()) return this;
        if (other.hasFamily()) {
          setFamily(other.getFamily());
        }
        if (other.hasScopeType()) {
          setScopeType(other.getScopeType());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      // Unlike the message's isInitialized(), this is not memoized: builder
      // state is mutable.
      public final boolean isInitialized() {
        if (!hasFamily()) {

          return false;
        }
        if (!hasScopeType()) {

          return false;
        }
        return true;
      }

      /**
       * Parses from the stream and merges into this builder. On parse failure
       * the fields read so far are still merged (see the finally block) before
       * the exception propagates.
       */
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      // Presence bits: bit 0 = family, bit 1 = scope_type.
      private int bitField0_;

      // required bytes family = 1;
      private com.google.protobuf.ByteString family_ = com.google.protobuf.ByteString.EMPTY;

      /** Whether family has been set on this builder. */
      public boolean hasFamily() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }

      public com.google.protobuf.ByteString getFamily() {
        return family_;
      }

      /** Sets the family bytes; null is rejected (proto fields are null-hostile). */
      public Builder setFamily(com.google.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000001;
        family_ = value;
        onChanged();
        return this;
      }

      /** Clears family back to its default and drops its presence bit. */
      public Builder clearFamily() {
        bitField0_ = (bitField0_ & ~0x00000001);
        family_ = getDefaultInstance().getFamily();
        onChanged();
        return this;
      }

      // required .ScopeType scope_type = 2;
      private org.apache.hadoop.hbase.protobuf.generated.WALProtos.ScopeType scopeType_ = org.apache.hadoop.hbase.protobuf.generated.WALProtos.ScopeType.REPLICATION_SCOPE_LOCAL;

      /** Whether scope_type has been set on this builder. */
      public boolean hasScopeType() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }

      public org.apache.hadoop.hbase.protobuf.generated.WALProtos.ScopeType getScopeType() {
        return scopeType_;
      }

      /** Sets the replication scope; null is rejected. */
      public Builder setScopeType(org.apache.hadoop.hbase.protobuf.generated.WALProtos.ScopeType value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000002;
        scopeType_ = value;
        onChanged();
        return this;
      }

      /** Clears scope_type back to REPLICATION_SCOPE_LOCAL and drops its presence bit. */
      public Builder clearScopeType() {
        bitField0_ = (bitField0_ & ~0x00000002);
        scopeType_ = org.apache.hadoop.hbase.protobuf.generated.WALProtos.ScopeType.REPLICATION_SCOPE_LOCAL;
        onChanged();
        return this;
      }

      // @@protoc_insertion_point(builder_scope:FamilyScope)
    }
3958
    // Eagerly create the shared default instance with all fields at defaults.
    static {
      defaultInstance = new FamilyScope(true);
      defaultInstance.initFields();
    }
3963
3964
3965 }
3966
  /**
   * Read accessors shared by {@code CompactionDescriptor} and its builder.
   * Describes a store compaction recorded in the WAL: the table/region/family
   * it applied to, the input files consumed, and the output files produced.
   */
  public interface CompactionDescriptorOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required bytes table_name = 1;
    /** Whether table_name is set. */
    boolean hasTableName();

    /** Table the compacted store belongs to. */
    com.google.protobuf.ByteString getTableName();

    // required bytes encoded_region_name = 2;
    /** Whether encoded_region_name is set. */
    boolean hasEncodedRegionName();

    /** Encoded name of the region containing the store. */
    com.google.protobuf.ByteString getEncodedRegionName();

    // required bytes family_name = 3;
    /** Whether family_name is set. */
    boolean hasFamilyName();

    /** Column family whose store was compacted. */
    com.google.protobuf.ByteString getFamilyName();

    // repeated string compaction_input = 4;
    /** File names consumed by the compaction. */
    java.util.List<java.lang.String>
        getCompactionInputList();

    /** Number of compaction input files. */
    int getCompactionInputCount();

    /** Input file name at {@code index}. */
    java.lang.String getCompactionInput(int index);

    /** Input file name at {@code index} as raw UTF-8 bytes. */
    com.google.protobuf.ByteString
        getCompactionInputBytes(int index);

    // repeated string compaction_output = 5;
    /** File names produced by the compaction. */
    java.util.List<java.lang.String>
        getCompactionOutputList();

    /** Number of compaction output files. */
    int getCompactionOutputCount();

    /** Output file name at {@code index}. */
    java.lang.String getCompactionOutput(int index);

    /** Output file name at {@code index} as raw UTF-8 bytes. */
    com.google.protobuf.ByteString
        getCompactionOutputBytes(int index);

    // required string store_home_dir = 6;
    /** Whether store_home_dir is set. */
    boolean hasStoreHomeDir();

    /** Store directory path (relative — NOTE(review): assumed relative to the region dir; confirm against writer). */
    java.lang.String getStoreHomeDir();

    /** store_home_dir as raw UTF-8 bytes. */
    com.google.protobuf.ByteString
        getStoreHomeDirBytes();

    // optional bytes region_name = 7; (optional: not checked by isInitialized()).
    /** Whether region_name is set. */
    boolean hasRegionName();

    /** Full (un-encoded) region name. */
    com.google.protobuf.ByteString getRegionName();
  }
4109
4110
4111
4112
4113
4114
4115
4116
4117
4118
4119
4120 public static final class CompactionDescriptor extends
4121 com.google.protobuf.GeneratedMessage
4122 implements CompactionDescriptorOrBuilder {
4123
    // Builder-driven constructor: adopts the unknown fields collected by the builder.
    private CompactionDescriptor(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Used only to construct the singleton default instance (no parsing done).
    private CompactionDescriptor(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final CompactionDescriptor defaultInstance;
    /** Shared immutable instance with every field at its default. */
    public static CompactionDescriptor getDefaultInstance() {
      return defaultInstance;
    }

    public CompactionDescriptor getDefaultInstanceForType() {
      return defaultInstance;
    }

    // Fields seen on the wire that this schema version does not define.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    /**
     * Wire-format parsing constructor: reads tag/value pairs until end of
     * input (tag 0) or an unparseable unknown field. Unrecognized tags are
     * preserved in {@code unknownFields} rather than dropped.
     */
    private CompactionDescriptor(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      // Tracks lazy allocation of the repeated-field lists (bits 3 and 4),
      // separate from the message's presence bits.
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            // NOTE: default precedes the specific cases; Java switch dispatch
            // is unaffected by case ordering.
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {  // field 1, length-delimited: table_name
              bitField0_ |= 0x00000001;
              tableName_ = input.readBytes();
              break;
            }
            case 18: {  // field 2, length-delimited: encoded_region_name
              bitField0_ |= 0x00000002;
              encodedRegionName_ = input.readBytes();
              break;
            }
            case 26: {  // field 3, length-delimited: family_name
              bitField0_ |= 0x00000004;
              familyName_ = input.readBytes();
              break;
            }
            case 34: {  // field 4, repeated: compaction_input (list created on first element)
              if (!((mutable_bitField0_ & 0x00000008) == 0x00000008)) {
                compactionInput_ = new com.google.protobuf.LazyStringArrayList();
                mutable_bitField0_ |= 0x00000008;
              }
              compactionInput_.add(input.readBytes());
              break;
            }
            case 42: {  // field 5, repeated: compaction_output (list created on first element)
              if (!((mutable_bitField0_ & 0x00000010) == 0x00000010)) {
                compactionOutput_ = new com.google.protobuf.LazyStringArrayList();
                mutable_bitField0_ |= 0x00000010;
              }
              compactionOutput_.add(input.readBytes());
              break;
            }
            case 50: {  // field 6, length-delimited: store_home_dir (kept as bytes, decoded lazily)
              bitField0_ |= 0x00000008;
              storeHomeDir_ = input.readBytes();
              break;
            }
            case 58: {  // field 7, length-delimited: region_name
              bitField0_ |= 0x00000010;
              regionName_ = input.readBytes();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Freeze the repeated fields and unknown-field set even on failure,
        // so the partially-parsed message attached to the exception is safe.
        if (((mutable_bitField0_ & 0x00000008) == 0x00000008)) {
          compactionInput_ = new com.google.protobuf.UnmodifiableLazyStringList(compactionInput_);
        }
        if (((mutable_bitField0_ & 0x00000010) == 0x00000010)) {
          compactionOutput_ = new com.google.protobuf.UnmodifiableLazyStringList(compactionOutput_);
        }
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    /** Descriptor for the CompactionDescriptor message type. */
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_CompactionDescriptor_descriptor;
    }

    // Reflection support: maps descriptor fields to generated accessors.
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_CompactionDescriptor_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor.class, org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor.Builder.class);
    }
4238
4239 public static com.google.protobuf.Parser<CompactionDescriptor> PARSER =
4240 new com.google.protobuf.AbstractParser<CompactionDescriptor>() {
4241 public CompactionDescriptor parsePartialFrom(
4242 com.google.protobuf.CodedInputStream input,
4243 com.google.protobuf.ExtensionRegistryLite extensionRegistry)
4244 throws com.google.protobuf.InvalidProtocolBufferException {
4245 return new CompactionDescriptor(input, extensionRegistry);
4246 }
4247 };
4248
4249 @java.lang.Override
4250 public com.google.protobuf.Parser<CompactionDescriptor> getParserForType() {
4251 return PARSER;
4252 }
4253
    // Presence bits: 1=table_name, 2=encoded_region_name, 4=family_name,
    // 8=store_home_dir, 16=region_name. Repeated fields have no presence bit.
    private int bitField0_;

    // required bytes table_name = 1;
    public static final int TABLE_NAME_FIELD_NUMBER = 1;
    private com.google.protobuf.ByteString tableName_;

    /** Whether table_name is set (required: enforced by isInitialized()). */
    public boolean hasTableName() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }

    /** Table the compacted store belongs to. */
    public com.google.protobuf.ByteString getTableName() {
      return tableName_;
    }

    // required bytes encoded_region_name = 2;
    public static final int ENCODED_REGION_NAME_FIELD_NUMBER = 2;
    private com.google.protobuf.ByteString encodedRegionName_;

    /** Whether encoded_region_name is set (required). */
    public boolean hasEncodedRegionName() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }

    /** Encoded name of the region containing the store. */
    public com.google.protobuf.ByteString getEncodedRegionName() {
      return encodedRegionName_;
    }

    // required bytes family_name = 3;
    public static final int FAMILY_NAME_FIELD_NUMBER = 3;
    private com.google.protobuf.ByteString familyName_;

    /** Whether family_name is set (required). */
    public boolean hasFamilyName() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }

    /** Column family whose store was compacted. */
    public com.google.protobuf.ByteString getFamilyName() {
      return familyName_;
    }

    // repeated string compaction_input = 4;
    public static final int COMPACTION_INPUT_FIELD_NUMBER = 4;
    private com.google.protobuf.LazyStringList compactionInput_;

    /** File names consumed by the compaction (unmodifiable after parsing). */
    public java.util.List<java.lang.String>
        getCompactionInputList() {
      return compactionInput_;
    }

    /** Number of compaction input files. */
    public int getCompactionInputCount() {
      return compactionInput_.size();
    }

    /** Input file name at {@code index}. */
    public java.lang.String getCompactionInput(int index) {
      return compactionInput_.get(index);
    }

    /** Input file name at {@code index} as raw UTF-8 bytes (no decode cost). */
    public com.google.protobuf.ByteString
        getCompactionInputBytes(int index) {
      return compactionInput_.getByteString(index);
    }

    // repeated string compaction_output = 5;
    public static final int COMPACTION_OUTPUT_FIELD_NUMBER = 5;
    private com.google.protobuf.LazyStringList compactionOutput_;

    /** File names produced by the compaction (unmodifiable after parsing). */
    public java.util.List<java.lang.String>
        getCompactionOutputList() {
      return compactionOutput_;
    }

    /** Number of compaction output files. */
    public int getCompactionOutputCount() {
      return compactionOutput_.size();
    }

    /** Output file name at {@code index}. */
    public java.lang.String getCompactionOutput(int index) {
      return compactionOutput_.get(index);
    }

    /** Output file name at {@code index} as raw UTF-8 bytes. */
    public com.google.protobuf.ByteString
        getCompactionOutputBytes(int index) {
      return compactionOutput_.getByteString(index);
    }

    // required string store_home_dir = 6; stored as Object so the raw
    // ByteString from the wire can be decoded to String lazily.
    public static final int STORE_HOME_DIR_FIELD_NUMBER = 6;
    private java.lang.Object storeHomeDir_;

    /** Whether store_home_dir is set (required). */
    public boolean hasStoreHomeDir() {
      return ((bitField0_ & 0x00000008) == 0x00000008);
    }

    /**
     * Store directory path. Decodes the cached ByteString to a String on
     * first call; the decoded form is cached only when it is valid UTF-8.
     */
    public java.lang.String getStoreHomeDir() {
      java.lang.Object ref = storeHomeDir_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          storeHomeDir_ = s;
        }
        return s;
      }
    }

    /** store_home_dir as UTF-8 bytes; re-encodes and caches if currently a String. */
    public com.google.protobuf.ByteString
        getStoreHomeDirBytes() {
      java.lang.Object ref = storeHomeDir_;
      if (ref instanceof java.lang.String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        storeHomeDir_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    // optional bytes region_name = 7; (optional: not checked by isInitialized()).
    public static final int REGION_NAME_FIELD_NUMBER = 7;
    private com.google.protobuf.ByteString regionName_;

    /** Whether region_name is set. */
    public boolean hasRegionName() {
      return ((bitField0_ & 0x00000010) == 0x00000010);
    }

    /** Full (un-encoded) region name. */
    public com.google.protobuf.ByteString getRegionName() {
      return regionName_;
    }
4465
    // Resets every field to its proto default; called before parsing and
    // when building the default instance.
    private void initFields() {
      tableName_ = com.google.protobuf.ByteString.EMPTY;
      encodedRegionName_ = com.google.protobuf.ByteString.EMPTY;
      familyName_ = com.google.protobuf.ByteString.EMPTY;
      compactionInput_ = com.google.protobuf.LazyStringArrayList.EMPTY;
      compactionOutput_ = com.google.protobuf.LazyStringArrayList.EMPTY;
      storeHomeDir_ = "";
      regionName_ = com.google.protobuf.ByteString.EMPTY;
    }
    // Memoized result of isInitialized(): -1 = not computed, 0 = false, 1 = true.
    private byte memoizedIsInitialized = -1;

    /**
     * Initialized when the four required fields are present:
     * table_name, encoded_region_name, family_name, store_home_dir.
     * (region_name and the repeated fields are not required.)
     */
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      if (!hasTableName()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasEncodedRegionName()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasFamilyName()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasStoreHomeDir()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }
4499
    /**
     * Serializes set fields in ascending field-number order (1..7), each
     * repeated element with its own tag, then any unknown fields.
     */
    public void writeTo(com.google.protobuf.CodedOutputStream output)
        throws java.io.IOException {
      getSerializedSize();  // populate the memoized size before writing
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeBytes(1, tableName_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeBytes(2, encodedRegionName_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeBytes(3, familyName_);
      }
      for (int i = 0; i < compactionInput_.size(); i++) {
        output.writeBytes(4, compactionInput_.getByteString(i));
      }
      for (int i = 0; i < compactionOutput_.size(); i++) {
        output.writeBytes(5, compactionOutput_.getByteString(i));
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        output.writeBytes(6, getStoreHomeDirBytes());
      }
      if (((bitField0_ & 0x00000010) == 0x00000010)) {
        output.writeBytes(7, regionName_);
      }
      getUnknownFields().writeTo(output);
    }

    // Memoized wire size: -1 = not yet computed.
    private int memoizedSerializedSize = -1;

    /** Computes (and caches) the serialized byte size of this message. */
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(1, tableName_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(2, encodedRegionName_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(3, familyName_);
      }
      {
        int dataSize = 0;
        for (int i = 0; i < compactionInput_.size(); i++) {
          dataSize += com.google.protobuf.CodedOutputStream
            .computeBytesSizeNoTag(compactionInput_.getByteString(i));
        }
        size += dataSize;
        // One 1-byte tag per repeated element (field number 4 fits in one byte).
        size += 1 * getCompactionInputList().size();
      }
      {
        int dataSize = 0;
        for (int i = 0; i < compactionOutput_.size(); i++) {
          dataSize += com.google.protobuf.CodedOutputStream
            .computeBytesSizeNoTag(compactionOutput_.getByteString(i));
        }
        size += dataSize;
        size += 1 * getCompactionOutputList().size();
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(6, getStoreHomeDirBytes());
      }
      if (((bitField0_ & 0x00000010) == 0x00000010)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(7, regionName_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
4575
    private static final long serialVersionUID = 0L;
    // Java-serialization hook; delegates to the superclass replacement object.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
4582
4583 @java.lang.Override
4584 public boolean equals(final java.lang.Object obj) {
4585 if (obj == this) {
4586 return true;
4587 }
4588 if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor)) {
4589 return super.equals(obj);
4590 }
4591 org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor other = (org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor) obj;
4592
4593 boolean result = true;
4594 result = result && (hasTableName() == other.hasTableName());
4595 if (hasTableName()) {
4596 result = result && getTableName()
4597 .equals(other.getTableName());
4598 }
4599 result = result && (hasEncodedRegionName() == other.hasEncodedRegionName());
4600 if (hasEncodedRegionName()) {
4601 result = result && getEncodedRegionName()
4602 .equals(other.getEncodedRegionName());
4603 }
4604 result = result && (hasFamilyName() == other.hasFamilyName());
4605 if (hasFamilyName()) {
4606 result = result && getFamilyName()
4607 .equals(other.getFamilyName());
4608 }
4609 result = result && getCompactionInputList()
4610 .equals(other.getCompactionInputList());
4611 result = result && getCompactionOutputList()
4612 .equals(other.getCompactionOutputList());
4613 result = result && (hasStoreHomeDir() == other.hasStoreHomeDir());
4614 if (hasStoreHomeDir()) {
4615 result = result && getStoreHomeDir()
4616 .equals(other.getStoreHomeDir());
4617 }
4618 result = result && (hasRegionName() == other.hasRegionName());
4619 if (hasRegionName()) {
4620 result = result && getRegionName()
4621 .equals(other.getRegionName());
4622 }
4623 result = result &&
4624 getUnknownFields().equals(other.getUnknownFields());
4625 return result;
4626 }
4627
    // Memoized hash: 0 = not yet computed (a computed 0 is recomputed; harmless).
    private int memoizedHashCode = 0;

    /**
     * Hash consistent with equals(): folds in the descriptor, each present
     * (or non-empty repeated) field tagged by its field number, and the
     * unknown-field set.
     */
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasTableName()) {
        hash = (37 * hash) + TABLE_NAME_FIELD_NUMBER;
        hash = (53 * hash) + getTableName().hashCode();
      }
      if (hasEncodedRegionName()) {
        hash = (37 * hash) + ENCODED_REGION_NAME_FIELD_NUMBER;
        hash = (53 * hash) + getEncodedRegionName().hashCode();
      }
      if (hasFamilyName()) {
        hash = (37 * hash) + FAMILY_NAME_FIELD_NUMBER;
        hash = (53 * hash) + getFamilyName().hashCode();
      }
      if (getCompactionInputCount() > 0) {
        hash = (37 * hash) + COMPACTION_INPUT_FIELD_NUMBER;
        hash = (53 * hash) + getCompactionInputList().hashCode();
      }
      if (getCompactionOutputCount() > 0) {
        hash = (37 * hash) + COMPACTION_OUTPUT_FIELD_NUMBER;
        hash = (53 * hash) + getCompactionOutputList().hashCode();
      }
      if (hasStoreHomeDir()) {
        hash = (37 * hash) + STORE_HOME_DIR_FIELD_NUMBER;
        hash = (53 * hash) + getStoreHomeDir().hashCode();
      }
      if (hasRegionName()) {
        hash = (37 * hash) + REGION_NAME_FIELD_NUMBER;
        hash = (53 * hash) + getRegionName().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
4668
    // Static parse helpers — all delegate to PARSER. The ByteString/byte[]
    // overloads throw InvalidProtocolBufferException on malformed input; the
    // stream overloads throw IOException. parseDelimitedFrom expects a varint
    // length prefix before the message bytes (see protobuf docs).
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
4721
4722 public static Builder newBuilder() { return Builder.create(); }
4723 public Builder newBuilderForType() { return newBuilder(); }
4724 public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor prototype) {
4725 return newBuilder().mergeFrom(prototype);
4726 }
4727 public Builder toBuilder() { return newBuilder(this); }
4728
4729 @java.lang.Override
4730 protected Builder newBuilderForType(
4731 com.google.protobuf.GeneratedMessage.BuilderParent parent) {
4732 Builder builder = new Builder(parent);
4733 return builder;
4734 }
4735
4736
4737
4738
4739
4740
4741
4742
4743
4744
4745
    /**
     * Builder for {@code CompactionDescriptor} messages.
     *
     * <p>Generated protocol buffer code (protoc 2.5 style); do not edit by hand —
     * regenerate from the {@code .proto} definition instead.
     *
     * <p>Presence is tracked in {@code bitField0_}: bit 0 tableName,
     * bit 1 encodedRegionName, bit 2 familyName, bit 3 compactionInput-is-mutable,
     * bit 4 compactionOutput-is-mutable, bit 5 storeHomeDir, bit 6 regionName.
     * The two "is-mutable" bits implement copy-on-write for the repeated fields.
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptorOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_CompactionDescriptor_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_CompactionDescriptor_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor.class, org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor.Builder.class);
      }

      // Construct using CompactionDescriptor.newBuilder().
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        // No sub-message fields in this type, so nothing to force-initialize.
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }

      /** Resets every field to its default and clears all presence bits. */
      public Builder clear() {
        super.clear();
        tableName_ = com.google.protobuf.ByteString.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000001);
        encodedRegionName_ = com.google.protobuf.ByteString.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000002);
        familyName_ = com.google.protobuf.ByteString.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000004);
        compactionInput_ = com.google.protobuf.LazyStringArrayList.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000008);
        compactionOutput_ = com.google.protobuf.LazyStringArrayList.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000010);
        storeHomeDir_ = "";
        bitField0_ = (bitField0_ & ~0x00000020);
        regionName_ = com.google.protobuf.ByteString.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000040);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_CompactionDescriptor_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor.getDefaultInstance();
      }

      /**
       * Builds the message; throws if any required field is unset
       * (see {@link #isInitialized()}).
       */
      public org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor build() {
        org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      /**
       * Builds without checking required fields. The repeated string lists are
       * frozen (wrapped unmodifiable) and their "mutable" bits cleared, so a
       * later builder mutation makes a private copy first (copy-on-write).
       * Note builder bits 5 and 6 map to message bits 3 and 4 — the repeated
       * fields occupy no presence bits in the built message.
       */
      public org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor result = new org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.tableName_ = tableName_;
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.encodedRegionName_ = encodedRegionName_;
        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
          to_bitField0_ |= 0x00000004;
        }
        result.familyName_ = familyName_;
        if (((bitField0_ & 0x00000008) == 0x00000008)) {
          compactionInput_ = new com.google.protobuf.UnmodifiableLazyStringList(
              compactionInput_);
          bitField0_ = (bitField0_ & ~0x00000008);
        }
        result.compactionInput_ = compactionInput_;
        if (((bitField0_ & 0x00000010) == 0x00000010)) {
          compactionOutput_ = new com.google.protobuf.UnmodifiableLazyStringList(
              compactionOutput_);
          bitField0_ = (bitField0_ & ~0x00000010);
        }
        result.compactionOutput_ = compactionOutput_;
        if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
          to_bitField0_ |= 0x00000008;
        }
        result.storeHomeDir_ = storeHomeDir_;
        if (((from_bitField0_ & 0x00000040) == 0x00000040)) {
          to_bitField0_ |= 0x00000010;
        }
        result.regionName_ = regionName_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      /** Merges every set field of {@code other} into this builder. */
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor.getDefaultInstance()) return this;
        if (other.hasTableName()) {
          setTableName(other.getTableName());
        }
        if (other.hasEncodedRegionName()) {
          setEncodedRegionName(other.getEncodedRegionName());
        }
        if (other.hasFamilyName()) {
          setFamilyName(other.getFamilyName());
        }
        if (!other.compactionInput_.isEmpty()) {
          if (compactionInput_.isEmpty()) {
            // Share other's (already frozen) list; keep the mutable bit clear.
            compactionInput_ = other.compactionInput_;
            bitField0_ = (bitField0_ & ~0x00000008);
          } else {
            ensureCompactionInputIsMutable();
            compactionInput_.addAll(other.compactionInput_);
          }
          onChanged();
        }
        if (!other.compactionOutput_.isEmpty()) {
          if (compactionOutput_.isEmpty()) {
            compactionOutput_ = other.compactionOutput_;
            bitField0_ = (bitField0_ & ~0x00000010);
          } else {
            ensureCompactionOutputIsMutable();
            compactionOutput_.addAll(other.compactionOutput_);
          }
          onChanged();
        }
        if (other.hasStoreHomeDir()) {
          // Copy the raw Object (String or ByteString) so no eager UTF-8 decode occurs.
          bitField0_ |= 0x00000020;
          storeHomeDir_ = other.storeHomeDir_;
          onChanged();
        }
        if (other.hasRegionName()) {
          setRegionName(other.getRegionName());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      /**
       * True when all fields this builder treats as required — tableName,
       * encodedRegionName, familyName and storeHomeDir — are set.
       */
      public final boolean isInitialized() {
        if (!hasTableName()) {
          // required
          return false;
        }
        if (!hasEncodedRegionName()) {
          // required
          return false;
        }
        if (!hasFamilyName()) {
          // required
          return false;
        }
        if (!hasStoreHomeDir()) {
          // required
          return false;
        }
        return true;
      }

      /**
       * Parses from {@code input} and merges into this builder. Whatever was
       * successfully read before a failure is still merged (finally block),
       * matching protobuf's partial-merge contract.
       */
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      // Presence / mutability bits; see class javadoc for the bit layout.
      private int bitField0_;

      // ---------------- bytes tableName ----------------
      private com.google.protobuf.ByteString tableName_ = com.google.protobuf.ByteString.EMPTY;
      /** Whether tableName has been set. */
      public boolean hasTableName() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /** The tableName bytes (EMPTY until set). */
      public com.google.protobuf.ByteString getTableName() {
        return tableName_;
      }
      /** Sets tableName; rejects null. */
      public Builder setTableName(com.google.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000001;
        tableName_ = value;
        onChanged();
        return this;
      }
      /** Clears tableName back to the type's default. */
      public Builder clearTableName() {
        bitField0_ = (bitField0_ & ~0x00000001);
        tableName_ = getDefaultInstance().getTableName();
        onChanged();
        return this;
      }

      // ---------------- bytes encodedRegionName ----------------
      private com.google.protobuf.ByteString encodedRegionName_ = com.google.protobuf.ByteString.EMPTY;
      /** Whether encodedRegionName has been set. */
      public boolean hasEncodedRegionName() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /** The encodedRegionName bytes (EMPTY until set). */
      public com.google.protobuf.ByteString getEncodedRegionName() {
        return encodedRegionName_;
      }
      /** Sets encodedRegionName; rejects null. */
      public Builder setEncodedRegionName(com.google.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000002;
        encodedRegionName_ = value;
        onChanged();
        return this;
      }
      /** Clears encodedRegionName back to the type's default. */
      public Builder clearEncodedRegionName() {
        bitField0_ = (bitField0_ & ~0x00000002);
        encodedRegionName_ = getDefaultInstance().getEncodedRegionName();
        onChanged();
        return this;
      }

      // ---------------- bytes familyName ----------------
      private com.google.protobuf.ByteString familyName_ = com.google.protobuf.ByteString.EMPTY;
      /** Whether familyName has been set. */
      public boolean hasFamilyName() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      /** The familyName bytes (EMPTY until set). */
      public com.google.protobuf.ByteString getFamilyName() {
        return familyName_;
      }
      /** Sets familyName; rejects null. */
      public Builder setFamilyName(com.google.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000004;
        familyName_ = value;
        onChanged();
        return this;
      }
      /** Clears familyName back to the type's default. */
      public Builder clearFamilyName() {
        bitField0_ = (bitField0_ & ~0x00000004);
        familyName_ = getDefaultInstance().getFamilyName();
        onChanged();
        return this;
      }

      // ---------------- repeated string compactionInput ----------------
      private com.google.protobuf.LazyStringList compactionInput_ = com.google.protobuf.LazyStringArrayList.EMPTY;
      // Copy-on-write: replace a shared/frozen list with a private mutable copy.
      private void ensureCompactionInputIsMutable() {
        if (!((bitField0_ & 0x00000008) == 0x00000008)) {
          compactionInput_ = new com.google.protobuf.LazyStringArrayList(compactionInput_);
          bitField0_ |= 0x00000008;
         }
      }
      /** Read-only view of the compactionInput list. */
      public java.util.List<java.lang.String>
          getCompactionInputList() {
        return java.util.Collections.unmodifiableList(compactionInput_);
      }
      /** Number of compactionInput entries. */
      public int getCompactionInputCount() {
        return compactionInput_.size();
      }
      /** The compactionInput entry at {@code index}. */
      public java.lang.String getCompactionInput(int index) {
        return compactionInput_.get(index);
      }
      /** The compactionInput entry at {@code index} as raw bytes. */
      public com.google.protobuf.ByteString
          getCompactionInputBytes(int index) {
        return compactionInput_.getByteString(index);
      }
      /** Replaces the compactionInput entry at {@code index}; rejects null. */
      public Builder setCompactionInput(
          int index, java.lang.String value) {
        if (value == null) {
    throw new NullPointerException();
  }
  ensureCompactionInputIsMutable();
        compactionInput_.set(index, value);
        onChanged();
        return this;
      }
      /** Appends one compactionInput entry; rejects null. */
      public Builder addCompactionInput(
          java.lang.String value) {
        if (value == null) {
    throw new NullPointerException();
  }
  ensureCompactionInputIsMutable();
        compactionInput_.add(value);
        onChanged();
        return this;
      }
      /** Appends all of {@code values} to compactionInput. */
      public Builder addAllCompactionInput(
          java.lang.Iterable<java.lang.String> values) {
        ensureCompactionInputIsMutable();
        super.addAll(values, compactionInput_);
        onChanged();
        return this;
      }
      /** Removes every compactionInput entry. */
      public Builder clearCompactionInput() {
        compactionInput_ = com.google.protobuf.LazyStringArrayList.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000008);
        onChanged();
        return this;
      }
      /** Appends one compactionInput entry given as raw bytes; rejects null. */
      public Builder addCompactionInputBytes(
          com.google.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  ensureCompactionInputIsMutable();
        compactionInput_.add(value);
        onChanged();
        return this;
      }

      // ---------------- repeated string compactionOutput ----------------
      private com.google.protobuf.LazyStringList compactionOutput_ = com.google.protobuf.LazyStringArrayList.EMPTY;
      // Copy-on-write: replace a shared/frozen list with a private mutable copy.
      private void ensureCompactionOutputIsMutable() {
        if (!((bitField0_ & 0x00000010) == 0x00000010)) {
          compactionOutput_ = new com.google.protobuf.LazyStringArrayList(compactionOutput_);
          bitField0_ |= 0x00000010;
         }
      }
      /** Read-only view of the compactionOutput list. */
      public java.util.List<java.lang.String>
          getCompactionOutputList() {
        return java.util.Collections.unmodifiableList(compactionOutput_);
      }
      /** Number of compactionOutput entries. */
      public int getCompactionOutputCount() {
        return compactionOutput_.size();
      }
      /** The compactionOutput entry at {@code index}. */
      public java.lang.String getCompactionOutput(int index) {
        return compactionOutput_.get(index);
      }
      /** The compactionOutput entry at {@code index} as raw bytes. */
      public com.google.protobuf.ByteString
          getCompactionOutputBytes(int index) {
        return compactionOutput_.getByteString(index);
      }
      /** Replaces the compactionOutput entry at {@code index}; rejects null. */
      public Builder setCompactionOutput(
          int index, java.lang.String value) {
        if (value == null) {
    throw new NullPointerException();
  }
  ensureCompactionOutputIsMutable();
        compactionOutput_.set(index, value);
        onChanged();
        return this;
      }
      /** Appends one compactionOutput entry; rejects null. */
      public Builder addCompactionOutput(
          java.lang.String value) {
        if (value == null) {
    throw new NullPointerException();
  }
  ensureCompactionOutputIsMutable();
        compactionOutput_.add(value);
        onChanged();
        return this;
      }
      /** Appends all of {@code values} to compactionOutput. */
      public Builder addAllCompactionOutput(
          java.lang.Iterable<java.lang.String> values) {
        ensureCompactionOutputIsMutable();
        super.addAll(values, compactionOutput_);
        onChanged();
        return this;
      }
      /** Removes every compactionOutput entry. */
      public Builder clearCompactionOutput() {
        compactionOutput_ = com.google.protobuf.LazyStringArrayList.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000010);
        onChanged();
        return this;
      }
      /** Appends one compactionOutput entry given as raw bytes; rejects null. */
      public Builder addCompactionOutputBytes(
          com.google.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  ensureCompactionOutputIsMutable();
        compactionOutput_.add(value);
        onChanged();
        return this;
      }

      // ---------------- string storeHomeDir ----------------
      // Holds either a String or a ByteString; decoded/encoded lazily and cached.
      private java.lang.Object storeHomeDir_ = "";
      /** Whether storeHomeDir has been set. */
      public boolean hasStoreHomeDir() {
        return ((bitField0_ & 0x00000020) == 0x00000020);
      }
      /** The storeHomeDir as a String, decoding (and caching) from bytes if needed. */
      public java.lang.String getStoreHomeDir() {
        java.lang.Object ref = storeHomeDir_;
        if (!(ref instanceof java.lang.String)) {
          java.lang.String s = ((com.google.protobuf.ByteString) ref)
              .toStringUtf8();
          storeHomeDir_ = s;
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /** The storeHomeDir as UTF-8 bytes, encoding (and caching) from String if needed. */
      public com.google.protobuf.ByteString
          getStoreHomeDirBytes() {
        java.lang.Object ref = storeHomeDir_;
        if (ref instanceof String) {
          com.google.protobuf.ByteString b =
              com.google.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          storeHomeDir_ = b;
          return b;
        } else {
          return (com.google.protobuf.ByteString) ref;
        }
      }
      /** Sets storeHomeDir from a String; rejects null. */
      public Builder setStoreHomeDir(
          java.lang.String value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000020;
        storeHomeDir_ = value;
        onChanged();
        return this;
      }
      /** Clears storeHomeDir back to the type's default. */
      public Builder clearStoreHomeDir() {
        bitField0_ = (bitField0_ & ~0x00000020);
        storeHomeDir_ = getDefaultInstance().getStoreHomeDir();
        onChanged();
        return this;
      }
      /** Sets storeHomeDir from raw UTF-8 bytes; rejects null. */
      public Builder setStoreHomeDirBytes(
          com.google.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000020;
        storeHomeDir_ = value;
        onChanged();
        return this;
      }

      // ---------------- bytes regionName ----------------
      private com.google.protobuf.ByteString regionName_ = com.google.protobuf.ByteString.EMPTY;
      /** Whether regionName has been set. */
      public boolean hasRegionName() {
        return ((bitField0_ & 0x00000040) == 0x00000040);
      }
      /** The regionName bytes (EMPTY until set). */
      public com.google.protobuf.ByteString getRegionName() {
        return regionName_;
      }
      /** Sets regionName; rejects null. */
      public Builder setRegionName(com.google.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000040;
        regionName_ = value;
        onChanged();
        return this;
      }
      /** Clears regionName back to the type's default. */
      public Builder clearRegionName() {
        bitField0_ = (bitField0_ & ~0x00000040);
        regionName_ = getDefaultInstance().getRegionName();
        onChanged();
        return this;
      }

      // @@protoc_insertion_point(builder_scope:CompactionDescriptor)
    }
5449
    // Eagerly create and initialize the singleton default instance for this type.
    static {
      defaultInstance = new CompactionDescriptor(true);
      defaultInstance.initFields();
    }
5454
5455
5456 }
5457
  /**
   * Accessor interface for {@code FlushDescriptor}, implemented by both the
   * immutable message and its builder.
   */
  public interface FlushDescriptorOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    /** Whether the {@code action} field has been set. */
    boolean hasAction();
    /** The flush action for this descriptor (see {@code FlushDescriptor.FlushAction}). */
    org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.FlushAction getAction();

    /** Whether the {@code tableName} field has been set. */
    boolean hasTableName();
    /** The table name as raw bytes. */
    com.google.protobuf.ByteString getTableName();

    /** Whether the {@code encodedRegionName} field has been set. */
    boolean hasEncodedRegionName();
    /** The encoded region name as raw bytes. */
    com.google.protobuf.ByteString getEncodedRegionName();

    /** Whether the {@code flushSequenceNumber} field has been set. */
    boolean hasFlushSequenceNumber();
    /** The WAL sequence number associated with this flush. */
    long getFlushSequenceNumber();

    /** All per-store flush descriptors. */
    java.util.List<org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor>
        getStoreFlushesList();
    /** The store flush descriptor at {@code index}. */
    org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor getStoreFlushes(int index);
    /** Number of store flush descriptors. */
    int getStoreFlushesCount();
    /** All per-store flush descriptors, as message-or-builder views. */
    java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptorOrBuilder>
        getStoreFlushesOrBuilderList();
    /** The store flush descriptor at {@code index}, as a message-or-builder view. */
    org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptorOrBuilder getStoreFlushesOrBuilder(
        int index);

    /** Whether the {@code regionName} field has been set. */
    boolean hasRegionName();
    /** The full region name as raw bytes. */
    com.google.protobuf.ByteString getRegionName();
  }
5544
5545
5546
5547
5548
5549
5550
5551
5552 public static final class FlushDescriptor extends
5553 com.google.protobuf.GeneratedMessage
5554 implements FlushDescriptorOrBuilder {
5555
    // Used by Builder.build(): copies the builder's unknown fields.
    private FlushDescriptor(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Used only to construct the singleton default instance (no parsing performed).
    private FlushDescriptor(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
5561
    // Singleton default instance, assigned in the class's static initializer.
    private static final FlushDescriptor defaultInstance;
    /** Returns the immutable default (all-fields-default) instance. */
    public static FlushDescriptor getDefaultInstance() {
      return defaultInstance;
    }

    public FlushDescriptor getDefaultInstanceForType() {
      return defaultInstance;
    }
5570
    // Fields seen on the wire that this generated class does not recognize.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    /**
     * Parses a FlushDescriptor directly from the wire.
     *
     * <p>Unrecognized tags are preserved in {@code unknownFields}; an enum value
     * outside the known range is likewise kept as an unknown varint. Note the
     * {@code default:} label deliberately precedes the field cases — protoc
     * emits it this way and case-label order is irrelevant in Java.
     *
     * @throws InvalidProtocolBufferException on malformed input; the partially
     *         parsed message is attached via {@code setUnfinishedMessage}.
     */
    private FlushDescriptor(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              // Tag 0 = end of stream.
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 8: {
              // field 1 (enum action): unknown numeric values go to unknownFields.
              int rawValue = input.readEnum();
              org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.FlushAction value = org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.FlushAction.valueOf(rawValue);
              if (value == null) {
                unknownFields.mergeVarintField(1, rawValue);
              } else {
                bitField0_ |= 0x00000001;
                action_ = value;
              }
              break;
            }
            case 18: {
              // field 2 (bytes tableName)
              bitField0_ |= 0x00000002;
              tableName_ = input.readBytes();
              break;
            }
            case 26: {
              // field 3 (bytes encodedRegionName)
              bitField0_ |= 0x00000004;
              encodedRegionName_ = input.readBytes();
              break;
            }
            case 32: {
              // field 4 (uint64 flushSequenceNumber)
              bitField0_ |= 0x00000008;
              flushSequenceNumber_ = input.readUInt64();
              break;
            }
            case 42: {
              // field 5 (repeated message storeFlushes): list allocated lazily
              // on first element; mutable bit tracked in mutable_bitField0_.
              if (!((mutable_bitField0_ & 0x00000010) == 0x00000010)) {
                storeFlushes_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor>();
                mutable_bitField0_ |= 0x00000010;
              }
              storeFlushes_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor.PARSER, extensionRegistry));
              break;
            }
            case 50: {
              // field 6 (bytes regionName)
              bitField0_ |= 0x00000010;
              regionName_ = input.readBytes();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Freeze the repeated field and finalize unknown fields even on failure.
        if (((mutable_bitField0_ & 0x00000010) == 0x00000010)) {
          storeFlushes_ = java.util.Collections.unmodifiableList(storeFlushes_);
        }
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    /** Descriptor for this message type, from the file's descriptor pool. */
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_FlushDescriptor_descriptor;
    }

    // Wires reflection-based field access to this class and its Builder.
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_FlushDescriptor_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.class, org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.Builder.class);
    }
5665
    // NOTE(review): public static and non-final as emitted by protoc 2.5;
    // callers must treat it as read-only.
    public static com.google.protobuf.Parser<FlushDescriptor> PARSER =
        new com.google.protobuf.AbstractParser<FlushDescriptor>() {
      public FlushDescriptor parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        // Delegates to the wire-parsing constructor.
        return new FlushDescriptor(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<FlushDescriptor> getParserForType() {
      return PARSER;
    }
5680
5681
5682
5683
    /**
     * Protocol buffer enum {@code FlushDescriptor.FlushAction}.
     * Wire numbers: START_FLUSH=0, COMMIT_FLUSH=1, ABORT_FLUSH=2, CANNOT_FLUSH=3.
     */
    public enum FlushAction
        implements com.google.protobuf.ProtocolMessageEnum {
      /** {@code START_FLUSH = 0} */
      START_FLUSH(0, 0),
      /** {@code COMMIT_FLUSH = 1} */
      COMMIT_FLUSH(1, 1),
      /** {@code ABORT_FLUSH = 2} */
      ABORT_FLUSH(2, 2),
      /** {@code CANNOT_FLUSH = 3} */
      CANNOT_FLUSH(3, 3),
      ;

      /** Wire number of {@code START_FLUSH}. */
      public static final int START_FLUSH_VALUE = 0;
      /** Wire number of {@code COMMIT_FLUSH}. */
      public static final int COMMIT_FLUSH_VALUE = 1;
      /** Wire number of {@code ABORT_FLUSH}. */
      public static final int ABORT_FLUSH_VALUE = 2;
      /** Wire number of {@code CANNOT_FLUSH}. */
      public static final int CANNOT_FLUSH_VALUE = 3;

      /** Returns this constant's wire (proto) number. */
      public final int getNumber() { return value; }

      /** Maps a wire number to its constant; null when unrecognized. */
      public static FlushAction valueOf(int value) {
        switch (value) {
          case 0: return START_FLUSH;
          case 1: return COMMIT_FLUSH;
          case 2: return ABORT_FLUSH;
          case 3: return CANNOT_FLUSH;
          default: return null;
        }
      }

      public static com.google.protobuf.Internal.EnumLiteMap<FlushAction>
          internalGetValueMap() {
        return internalValueMap;
      }
      private static com.google.protobuf.Internal.EnumLiteMap<FlushAction>
          internalValueMap =
            new com.google.protobuf.Internal.EnumLiteMap<FlushAction>() {
              public FlushAction findValueByNumber(int number) {
                return FlushAction.valueOf(number);
              }
            };

      public final com.google.protobuf.Descriptors.EnumValueDescriptor
          getValueDescriptor() {
        return getDescriptor().getValues().get(index);
      }
      public final com.google.protobuf.Descriptors.EnumDescriptor
          getDescriptorForType() {
        return getDescriptor();
      }
      /** Descriptor for this enum type (first enum declared in FlushDescriptor). */
      public static final com.google.protobuf.Descriptors.EnumDescriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.getDescriptor().getEnumTypes().get(0);
      }

      private static final FlushAction[] VALUES = values();

      /**
       * Looks up the constant matching {@code desc}.
       * @throws IllegalArgumentException if the descriptor is for another enum type
       */
      public static FlushAction valueOf(
          com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
        if (desc.getType() != getDescriptor()) {
          throw new java.lang.IllegalArgumentException(
            "EnumValueDescriptor is not for this type.");
        }
        return VALUES[desc.getIndex()];
      }

      // index: position within the descriptor; value: proto wire number.
      private final int index;
      private final int value;

      private FlushAction(int index, int value) {
        this.index = index;
        this.value = value;
      }
    }
5788
    /**
     * Accessor interface for {@code FlushDescriptor.StoreFlushDescriptor},
     * implemented by both the immutable message and its builder.
     */
    public interface StoreFlushDescriptorOrBuilder
        extends com.google.protobuf.MessageOrBuilder {

      /** Whether the {@code familyName} field has been set. */
      boolean hasFamilyName();
      /** The column family name as raw bytes. */
      com.google.protobuf.ByteString getFamilyName();

      /** Whether the {@code storeHomeDir} field has been set. */
      boolean hasStoreHomeDir();
      /** The store home directory as a String. */
      java.lang.String getStoreHomeDir();
      /** The store home directory as UTF-8 bytes. */
      com.google.protobuf.ByteString
          getStoreHomeDirBytes();

      /** All flush output entries. */
      java.util.List<java.lang.String>
          getFlushOutputList();
      /** Number of flush output entries. */
      int getFlushOutputCount();
      /** The flush output entry at {@code index}. */
      java.lang.String getFlushOutput(int index);
      /** The flush output entry at {@code index} as raw bytes. */
      com.google.protobuf.ByteString
          getFlushOutputBytes(int index);
    }
5865
5866
5867
5868 public static final class StoreFlushDescriptor extends
5869 com.google.protobuf.GeneratedMessage
5870 implements StoreFlushDescriptorOrBuilder {
5871
      // Used by Builder.build(): copies the builder's unknown fields.
      private StoreFlushDescriptor(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
        super(builder);
        this.unknownFields = builder.getUnknownFields();
      }
      // Used only to construct the singleton default instance (no parsing performed).
      private StoreFlushDescriptor(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
5877
      // Singleton default instance, assigned in the class's static initializer.
      private static final StoreFlushDescriptor defaultInstance;
      /** Returns the immutable default (all-fields-default) instance. */
      public static StoreFlushDescriptor getDefaultInstance() {
        return defaultInstance;
      }

      public StoreFlushDescriptor getDefaultInstanceForType() {
        return defaultInstance;
      }
5886
      // Fields seen on the wire that this generated class does not recognize.
      private final com.google.protobuf.UnknownFieldSet unknownFields;
      @java.lang.Override
      public final com.google.protobuf.UnknownFieldSet
          getUnknownFields() {
        return this.unknownFields;
      }
      /**
       * Parses a StoreFlushDescriptor directly from the wire.
       *
       * <p>Unknown tags are preserved in {@code unknownFields}. The repeated
       * {@code flushOutput} list is allocated lazily on its first element and
       * frozen in the finally block. The {@code default:} label preceding the
       * field cases is genuine protoc output — case-label order is irrelevant.
       *
       * @throws InvalidProtocolBufferException on malformed input; the partially
       *         parsed message is attached via {@code setUnfinishedMessage}.
       */
      private StoreFlushDescriptor(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        initFields();
        int mutable_bitField0_ = 0;
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
            com.google.protobuf.UnknownFieldSet.newBuilder();
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                // Tag 0 = end of stream.
                done = true;
                break;
              default: {
                if (!parseUnknownField(input, unknownFields,
                                       extensionRegistry, tag)) {
                  done = true;
                }
                break;
              }
              case 10: {
                // field 1 (bytes familyName)
                bitField0_ |= 0x00000001;
                familyName_ = input.readBytes();
                break;
              }
              case 18: {
                // field 2 (string storeHomeDir) — stored as ByteString, decoded lazily.
                bitField0_ |= 0x00000002;
                storeHomeDir_ = input.readBytes();
                break;
              }
              case 26: {
                // field 3 (repeated string flushOutput)
                if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) {
                  flushOutput_ = new com.google.protobuf.LazyStringArrayList();
                  mutable_bitField0_ |= 0x00000004;
                }
                flushOutput_.add(input.readBytes());
                break;
              }
            }
          }
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(this);
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(
              e.getMessage()).setUnfinishedMessage(this);
        } finally {
          // Freeze the repeated field and finalize unknown fields even on failure.
          if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) {
            flushOutput_ = new com.google.protobuf.UnmodifiableLazyStringList(flushOutput_);
          }
          this.unknownFields = unknownFields.build();
          makeExtensionsImmutable();
        }
      }
      /** Descriptor for this nested message type, from the file's descriptor pool. */
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_FlushDescriptor_StoreFlushDescriptor_descriptor;
      }

      // Wires reflection-based field access to this class and its Builder.
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_FlushDescriptor_StoreFlushDescriptor_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor.class, org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor.Builder.class);
      }
5960
      // NOTE(review): public static and non-final as emitted by protoc 2.5;
      // callers must treat it as read-only.
      public static com.google.protobuf.Parser<StoreFlushDescriptor> PARSER =
          new com.google.protobuf.AbstractParser<StoreFlushDescriptor>() {
        public StoreFlushDescriptor parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          // Delegates to the wire-parsing constructor.
          return new StoreFlushDescriptor(input, extensionRegistry);
        }
      };

      @java.lang.Override
      public com.google.protobuf.Parser<StoreFlushDescriptor> getParserForType() {
        return PARSER;
      }
5975
      // Presence bits for the singular fields: bit 0 = family_name,
      // bit 1 = store_home_dir (repeated flush_output needs no bit here).
      private int bitField0_;

      // required bytes family_name = 1;
      public static final int FAMILY_NAME_FIELD_NUMBER = 1;
      private com.google.protobuf.ByteString familyName_;

      // True if family_name was explicitly set on the wire.
      public boolean hasFamilyName() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }

      // Returns family_name (ByteString.EMPTY when unset — see initFields()).
      public com.google.protobuf.ByteString getFamilyName() {
        return familyName_;
      }
5992
5993
      // required string store_home_dir = 2;
      public static final int STORE_HOME_DIR_FIELD_NUMBER = 2;
      // Holds either a String or a ByteString; decoded lazily on first access.
      private java.lang.Object storeHomeDir_;

      // True if store_home_dir was explicitly set on the wire.
      public boolean hasStoreHomeDir() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }

      // Returns store_home_dir as a String. The decoded String is cached back
      // only when the bytes are valid UTF-8, so an invalid value is re-decoded
      // (with replacement characters) on every call.
      public java.lang.String getStoreHomeDir() {
        java.lang.Object ref = storeHomeDir_;
        if (ref instanceof java.lang.String) {
          return (java.lang.String) ref;
        } else {
          com.google.protobuf.ByteString bs =
              (com.google.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            storeHomeDir_ = s;
          }
          return s;
        }
      }

      // Returns store_home_dir as UTF-8 bytes, caching the encoded form so
      // repeated serialization does not re-encode.
      public com.google.protobuf.ByteString
          getStoreHomeDirBytes() {
        java.lang.Object ref = storeHomeDir_;
        if (ref instanceof java.lang.String) {
          com.google.protobuf.ByteString b =
              com.google.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          storeHomeDir_ = b;
          return b;
        } else {
          return (com.google.protobuf.ByteString) ref;
        }
      }
6047
6048
      // repeated string flush_output = 3;
      public static final int FLUSH_OUTPUT_FIELD_NUMBER = 3;
      private com.google.protobuf.LazyStringList flushOutput_;

      // Returns the flush_output values as a List view.
      public java.util.List<java.lang.String>
          getFlushOutputList() {
        return flushOutput_;
      }

      // Number of flush_output entries.
      public int getFlushOutputCount() {
        return flushOutput_.size();
      }

      // flush_output entry at the given index, decoded to a String.
      public java.lang.String getFlushOutput(int index) {
        return flushOutput_.get(index);
      }

      // flush_output entry at the given index as raw bytes (no UTF-8 decode).
      public com.google.protobuf.ByteString
          getFlushOutputBytes(int index) {
        return flushOutput_.getByteString(index);
      }
6093
      // Installs the proto default value for every field.
      private void initFields() {
        familyName_ = com.google.protobuf.ByteString.EMPTY;
        storeHomeDir_ = "";
        flushOutput_ = com.google.protobuf.LazyStringArrayList.EMPTY;
      }
      // Memoized isInitialized() result: -1 = unknown, 0 = false, 1 = true.
      private byte memoizedIsInitialized = -1;

      // Initialized once both required fields (family_name, store_home_dir)
      // are present; result is cached after the first computation.
      public final boolean isInitialized() {
        byte isInitialized = memoizedIsInitialized;
        if (isInitialized != -1) return isInitialized == 1;

        if (!hasFamilyName()) {
          memoizedIsInitialized = 0;
          return false;
        }
        if (!hasStoreHomeDir()) {
          memoizedIsInitialized = 0;
          return false;
        }
        memoizedIsInitialized = 1;
        return true;
      }
6115
      // Serializes the present fields in field-number order, then any unknown
      // fields that were preserved from parsing.
      public void writeTo(com.google.protobuf.CodedOutputStream output)
                          throws java.io.IOException {
        getSerializedSize();  // primes the memoized size before writing
        if (((bitField0_ & 0x00000001) == 0x00000001)) {
          output.writeBytes(1, familyName_);
        }
        if (((bitField0_ & 0x00000002) == 0x00000002)) {
          output.writeBytes(2, getStoreHomeDirBytes());
        }
        for (int i = 0; i < flushOutput_.size(); i++) {
          output.writeBytes(3, flushOutput_.getByteString(i));
        }
        getUnknownFields().writeTo(output);
      }
6130
      // Cached serialized size; -1 means not yet computed.
      private int memoizedSerializedSize = -1;

      // Computes (and caches) the total wire size of this message.
      public int getSerializedSize() {
        int size = memoizedSerializedSize;
        if (size != -1) return size;

        size = 0;
        if (((bitField0_ & 0x00000001) == 0x00000001)) {
          size += com.google.protobuf.CodedOutputStream
            .computeBytesSize(1, familyName_);
        }
        if (((bitField0_ & 0x00000002) == 0x00000002)) {
          size += com.google.protobuf.CodedOutputStream
            .computeBytesSize(2, getStoreHomeDirBytes());
        }
        {
          // repeated flush_output: payload bytes plus one 1-byte tag per entry.
          int dataSize = 0;
          for (int i = 0; i < flushOutput_.size(); i++) {
            dataSize += com.google.protobuf.CodedOutputStream
              .computeBytesSizeNoTag(flushOutput_.getByteString(i));
          }
          size += dataSize;
          size += 1 * getFlushOutputList().size();
        }
        size += getUnknownFields().getSerializedSize();
        memoizedSerializedSize = size;
        return size;
      }
6158
      private static final long serialVersionUID = 0L;
      // Java serialization replaces this instance with the proxy object chosen
      // by the GeneratedMessage superclass.
      @java.lang.Override
      protected java.lang.Object writeReplace()
          throws java.io.ObjectStreamException {
        return super.writeReplace();
      }
6165
6166 @java.lang.Override
6167 public boolean equals(final java.lang.Object obj) {
6168 if (obj == this) {
6169 return true;
6170 }
6171 if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor)) {
6172 return super.equals(obj);
6173 }
6174 org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor other = (org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor) obj;
6175
6176 boolean result = true;
6177 result = result && (hasFamilyName() == other.hasFamilyName());
6178 if (hasFamilyName()) {
6179 result = result && getFamilyName()
6180 .equals(other.getFamilyName());
6181 }
6182 result = result && (hasStoreHomeDir() == other.hasStoreHomeDir());
6183 if (hasStoreHomeDir()) {
6184 result = result && getStoreHomeDir()
6185 .equals(other.getStoreHomeDir());
6186 }
6187 result = result && getFlushOutputList()
6188 .equals(other.getFlushOutputList());
6189 result = result &&
6190 getUnknownFields().equals(other.getUnknownFields());
6191 return result;
6192 }
6193
      // Cached hash; 0 means not yet computed.
      private int memoizedHashCode = 0;

      // Hash over the descriptor, each present field (keyed by its field
      // number), and the unknown fields; memoized after the first call.
      @java.lang.Override
      public int hashCode() {
        if (memoizedHashCode != 0) {
          return memoizedHashCode;
        }
        int hash = 41;
        hash = (19 * hash) + getDescriptorForType().hashCode();
        if (hasFamilyName()) {
          hash = (37 * hash) + FAMILY_NAME_FIELD_NUMBER;
          hash = (53 * hash) + getFamilyName().hashCode();
        }
        if (hasStoreHomeDir()) {
          hash = (37 * hash) + STORE_HOME_DIR_FIELD_NUMBER;
          hash = (53 * hash) + getStoreHomeDir().hashCode();
        }
        if (getFlushOutputCount() > 0) {
          hash = (37 * hash) + FLUSH_OUTPUT_FIELD_NUMBER;
          hash = (53 * hash) + getFlushOutputList().hashCode();
        }
        hash = (29 * hash) + getUnknownFields().hashCode();
        memoizedHashCode = hash;
        return hash;
      }
6218
      // Static parse helpers. All delegate to PARSER and differ only in the
      // input source (ByteString, byte[], InputStream, CodedInputStream),
      // whether an ExtensionRegistry is supplied, and whether the stream is
      // length-delimited (parseDelimitedFrom).
      public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor parseFrom(
          com.google.protobuf.ByteString data)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data);
      }
      public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor parseFrom(
          com.google.protobuf.ByteString data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data, extensionRegistry);
      }
      public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor parseFrom(byte[] data)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data);
      }
      public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor parseFrom(
          byte[] data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data, extensionRegistry);
      }
      public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor parseFrom(java.io.InputStream input)
          throws java.io.IOException {
        return PARSER.parseFrom(input);
      }
      public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor parseFrom(
          java.io.InputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseFrom(input, extensionRegistry);
      }
      public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor parseDelimitedFrom(java.io.InputStream input)
          throws java.io.IOException {
        return PARSER.parseDelimitedFrom(input);
      }
      public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor parseDelimitedFrom(
          java.io.InputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseDelimitedFrom(input, extensionRegistry);
      }
      public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor parseFrom(
          com.google.protobuf.CodedInputStream input)
          throws java.io.IOException {
        return PARSER.parseFrom(input);
      }
      public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor parseFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseFrom(input, extensionRegistry);
      }
6271
      // Builder factory methods.
      public static Builder newBuilder() { return Builder.create(); }
      public Builder newBuilderForType() { return newBuilder(); }
      // Returns a fresh builder pre-populated with prototype's fields.
      public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor prototype) {
        return newBuilder().mergeFrom(prototype);
      }
      public Builder toBuilder() { return newBuilder(this); }

      // Creates a builder attached to a parent for change notification.
      @java.lang.Override
      protected Builder newBuilderForType(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        Builder builder = new Builder(parent);
        return builder;
      }
6285
6286
6287
      // Builder for StoreFlushDescriptor messages.
      //
      // bitField0_ tracks state: bit 0 = family_name present, bit 1 =
      // store_home_dir present, bit 2 = flushOutput_ is a private mutable
      // copy owned by this builder (copy-on-write for the repeated field).
      public static final class Builder extends
          com.google.protobuf.GeneratedMessage.Builder<Builder>
         implements org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptorOrBuilder {
        public static final com.google.protobuf.Descriptors.Descriptor
            getDescriptor() {
          return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_FlushDescriptor_StoreFlushDescriptor_descriptor;
        }

        protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
            internalGetFieldAccessorTable() {
          return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_FlushDescriptor_StoreFlushDescriptor_fieldAccessorTable
              .ensureFieldAccessorsInitialized(
                  org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor.class, org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor.Builder.class);
        }

        // Construct via StoreFlushDescriptor.newBuilder().
        private Builder() {
          maybeForceBuilderInitialization();
        }

        private Builder(
            com.google.protobuf.GeneratedMessage.BuilderParent parent) {
          super(parent);
          maybeForceBuilderInitialization();
        }
        // No message-typed fields, so there are no nested builders to force.
        private void maybeForceBuilderInitialization() {
          if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          }
        }
        private static Builder create() {
          return new Builder();
        }

        // Resets every field to its default and clears all presence bits.
        public Builder clear() {
          super.clear();
          familyName_ = com.google.protobuf.ByteString.EMPTY;
          bitField0_ = (bitField0_ & ~0x00000001);
          storeHomeDir_ = "";
          bitField0_ = (bitField0_ & ~0x00000002);
          flushOutput_ = com.google.protobuf.LazyStringArrayList.EMPTY;
          bitField0_ = (bitField0_ & ~0x00000004);
          return this;
        }

        public Builder clone() {
          return create().mergeFrom(buildPartial());
        }

        public com.google.protobuf.Descriptors.Descriptor
            getDescriptorForType() {
          return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_FlushDescriptor_StoreFlushDescriptor_descriptor;
        }

        public org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor getDefaultInstanceForType() {
          return org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor.getDefaultInstance();
        }

        // Builds the message, throwing if a required field is unset.
        public org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor build() {
          org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor result = buildPartial();
          if (!result.isInitialized()) {
            throw newUninitializedMessageException(result);
          }
          return result;
        }

        // Builds without checking required fields. Ownership of flushOutput_
        // is transferred by wrapping it unmodifiable and dropping bit 2, so a
        // later mutation on this builder copies the list first.
        public org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor buildPartial() {
          org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor result = new org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor(this);
          int from_bitField0_ = bitField0_;
          int to_bitField0_ = 0;
          if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
            to_bitField0_ |= 0x00000001;
          }
          result.familyName_ = familyName_;
          if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
            to_bitField0_ |= 0x00000002;
          }
          result.storeHomeDir_ = storeHomeDir_;
          if (((bitField0_ & 0x00000004) == 0x00000004)) {
            flushOutput_ = new com.google.protobuf.UnmodifiableLazyStringList(
                flushOutput_);
            bitField0_ = (bitField0_ & ~0x00000004);
          }
          result.flushOutput_ = flushOutput_;
          result.bitField0_ = to_bitField0_;
          onBuilt();
          return result;
        }

        public Builder mergeFrom(com.google.protobuf.Message other) {
          if (other instanceof org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor) {
            return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor)other);
          } else {
            super.mergeFrom(other);
            return this;
          }
        }

        // Field-by-field merge: set singular fields in other overwrite ours;
        // repeated flush_output entries are appended.
        public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor other) {
          if (other == org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor.getDefaultInstance()) return this;
          if (other.hasFamilyName()) {
            setFamilyName(other.getFamilyName());
          }
          if (other.hasStoreHomeDir()) {
            // Copies the raw Object (String or ByteString) directly so the
            // other message's lazy UTF-8 state is preserved.
            bitField0_ |= 0x00000002;
            storeHomeDir_ = other.storeHomeDir_;
            onChanged();
          }
          if (!other.flushOutput_.isEmpty()) {
            if (flushOutput_.isEmpty()) {
              // Share other's list until this builder needs to mutate it.
              flushOutput_ = other.flushOutput_;
              bitField0_ = (bitField0_ & ~0x00000004);
            } else {
              ensureFlushOutputIsMutable();
              flushOutput_.addAll(other.flushOutput_);
            }
            onChanged();
          }
          this.mergeUnknownFields(other.getUnknownFields());
          return this;
        }

        // Both required fields must be present before build() can succeed.
        public final boolean isInitialized() {
          if (!hasFamilyName()) {

            return false;
          }
          if (!hasStoreHomeDir()) {

            return false;
          }
          return true;
        }

        // Parses from a stream. On failure, first merges whatever was read
        // before the error (via the finally block) so partial data is kept,
        // then rethrows the original exception.
        public Builder mergeFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws java.io.IOException {
          org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor parsedMessage = null;
          try {
            parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor) e.getUnfinishedMessage();
            throw e;
          } finally {
            if (parsedMessage != null) {
              mergeFrom(parsedMessage);
            }
          }
          return this;
        }
        private int bitField0_;

        // required bytes family_name = 1;
        private com.google.protobuf.ByteString familyName_ = com.google.protobuf.ByteString.EMPTY;

        public boolean hasFamilyName() {
          return ((bitField0_ & 0x00000001) == 0x00000001);
        }

        public com.google.protobuf.ByteString getFamilyName() {
          return familyName_;
        }

        public Builder setFamilyName(com.google.protobuf.ByteString value) {
          if (value == null) {
            throw new NullPointerException();
          }
          bitField0_ |= 0x00000001;
          familyName_ = value;
          onChanged();
          return this;
        }

        public Builder clearFamilyName() {
          bitField0_ = (bitField0_ & ~0x00000001);
          familyName_ = getDefaultInstance().getFamilyName();
          onChanged();
          return this;
        }

        // required string store_home_dir = 2; holds a String or a ByteString.
        private java.lang.Object storeHomeDir_ = "";

        public boolean hasStoreHomeDir() {
          return ((bitField0_ & 0x00000002) == 0x00000002);
        }

        // Decodes and caches the String form. Unlike the message accessor,
        // the builder caches unconditionally (no UTF-8 validity check).
        public java.lang.String getStoreHomeDir() {
          java.lang.Object ref = storeHomeDir_;
          if (!(ref instanceof java.lang.String)) {
            java.lang.String s = ((com.google.protobuf.ByteString) ref)
                .toStringUtf8();
            storeHomeDir_ = s;
            return s;
          } else {
            return (java.lang.String) ref;
          }
        }

        // Returns (and caches) the UTF-8 bytes of store_home_dir.
        public com.google.protobuf.ByteString
            getStoreHomeDirBytes() {
          java.lang.Object ref = storeHomeDir_;
          if (ref instanceof String) {
            com.google.protobuf.ByteString b =
                com.google.protobuf.ByteString.copyFromUtf8(
                    (java.lang.String) ref);
            storeHomeDir_ = b;
            return b;
          } else {
            return (com.google.protobuf.ByteString) ref;
          }
        }

        public Builder setStoreHomeDir(
            java.lang.String value) {
          if (value == null) {
            throw new NullPointerException();
          }
          bitField0_ |= 0x00000002;
          storeHomeDir_ = value;
          onChanged();
          return this;
        }

        public Builder clearStoreHomeDir() {
          bitField0_ = (bitField0_ & ~0x00000002);
          storeHomeDir_ = getDefaultInstance().getStoreHomeDir();
          onChanged();
          return this;
        }

        // Sets store_home_dir from raw bytes without UTF-8 validation.
        public Builder setStoreHomeDirBytes(
            com.google.protobuf.ByteString value) {
          if (value == null) {
            throw new NullPointerException();
          }
          bitField0_ |= 0x00000002;
          storeHomeDir_ = value;
          onChanged();
          return this;
        }

        // repeated string flush_output = 3;
        private com.google.protobuf.LazyStringList flushOutput_ = com.google.protobuf.LazyStringArrayList.EMPTY;
        // Copy-on-write: clone the backing list the first time it is mutated.
        private void ensureFlushOutputIsMutable() {
          if (!((bitField0_ & 0x00000004) == 0x00000004)) {
            flushOutput_ = new com.google.protobuf.LazyStringArrayList(flushOutput_);
            bitField0_ |= 0x00000004;
          }
        }

        public java.util.List<java.lang.String>
            getFlushOutputList() {
          return java.util.Collections.unmodifiableList(flushOutput_);
        }

        public int getFlushOutputCount() {
          return flushOutput_.size();
        }

        public java.lang.String getFlushOutput(int index) {
          return flushOutput_.get(index);
        }

        public com.google.protobuf.ByteString
            getFlushOutputBytes(int index) {
          return flushOutput_.getByteString(index);
        }

        public Builder setFlushOutput(
            int index, java.lang.String value) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureFlushOutputIsMutable();
          flushOutput_.set(index, value);
          onChanged();
          return this;
        }

        public Builder addFlushOutput(
            java.lang.String value) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureFlushOutputIsMutable();
          flushOutput_.add(value);
          onChanged();
          return this;
        }

        public Builder addAllFlushOutput(
            java.lang.Iterable<java.lang.String> values) {
          ensureFlushOutputIsMutable();
          super.addAll(values, flushOutput_);
          onChanged();
          return this;
        }

        public Builder clearFlushOutput() {
          flushOutput_ = com.google.protobuf.LazyStringArrayList.EMPTY;
          bitField0_ = (bitField0_ & ~0x00000004);
          onChanged();
          return this;
        }

        // Appends a raw-bytes entry without UTF-8 validation.
        public Builder addFlushOutputBytes(
            com.google.protobuf.ByteString value) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureFlushOutputIsMutable();
          flushOutput_.add(value);
          onChanged();
          return this;
        }

      }
6705
      static {
        // Eagerly create the singleton default instance with proto defaults.
        defaultInstance = new StoreFlushDescriptor(true);
        defaultInstance.initFields();
      }
6710
6711
6712 }
6713
    // Presence bits: bit 0 action, bit 1 table_name, bit 2
    // encoded_region_name, bit 3 flush_sequence_number, bit 4 region_name.
    private int bitField0_;

    // required .FlushDescriptor.FlushAction action = 1;
    public static final int ACTION_FIELD_NUMBER = 1;
    private org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.FlushAction action_;

    public boolean hasAction() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }

    // Returns action (START_FLUSH when unset — see initFields()).
    public org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.FlushAction getAction() {
      return action_;
    }

    // required bytes table_name = 2;
    public static final int TABLE_NAME_FIELD_NUMBER = 2;
    private com.google.protobuf.ByteString tableName_;

    public boolean hasTableName() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }

    public com.google.protobuf.ByteString getTableName() {
      return tableName_;
    }

    // required bytes encoded_region_name = 3;
    public static final int ENCODED_REGION_NAME_FIELD_NUMBER = 3;
    private com.google.protobuf.ByteString encodedRegionName_;

    public boolean hasEncodedRegionName() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }

    public com.google.protobuf.ByteString getEncodedRegionName() {
      return encodedRegionName_;
    }

    // optional uint64 flush_sequence_number = 4;
    public static final int FLUSH_SEQUENCE_NUMBER_FIELD_NUMBER = 4;
    private long flushSequenceNumber_;

    public boolean hasFlushSequenceNumber() {
      return ((bitField0_ & 0x00000008) == 0x00000008);
    }

    public long getFlushSequenceNumber() {
      return flushSequenceNumber_;
    }

    // repeated .FlushDescriptor.StoreFlushDescriptor store_flushes = 5;
    public static final int STORE_FLUSHES_FIELD_NUMBER = 5;
    private java.util.List<org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor> storeFlushes_;

    public java.util.List<org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor> getStoreFlushesList() {
      return storeFlushes_;
    }

    // Same backing list, widened to the OrBuilder element type.
    public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptorOrBuilder>
        getStoreFlushesOrBuilderList() {
      return storeFlushes_;
    }

    public int getStoreFlushesCount() {
      return storeFlushes_.size();
    }

    public org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor getStoreFlushes(int index) {
      return storeFlushes_.get(index);
    }

    public org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptorOrBuilder getStoreFlushesOrBuilder(
        int index) {
      return storeFlushes_.get(index);
    }

    // optional bytes region_name = 6;
    public static final int REGION_NAME_FIELD_NUMBER = 6;
    private com.google.protobuf.ByteString regionName_;

    public boolean hasRegionName() {
      return ((bitField0_ & 0x00000010) == 0x00000010);
    }

    public com.google.protobuf.ByteString getRegionName() {
      return regionName_;
    }
6838
    // Installs the proto default value for every field.
    private void initFields() {
      action_ = org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.FlushAction.START_FLUSH;
      tableName_ = com.google.protobuf.ByteString.EMPTY;
      encodedRegionName_ = com.google.protobuf.ByteString.EMPTY;
      flushSequenceNumber_ = 0L;
      storeFlushes_ = java.util.Collections.emptyList();
      regionName_ = com.google.protobuf.ByteString.EMPTY;
    }
    // Memoized isInitialized() result: -1 = unknown, 0 = false, 1 = true.
    private byte memoizedIsInitialized = -1;

    // Initialized once action, table_name and encoded_region_name are present
    // and every store_flushes element is itself initialized; cached afterward.
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      if (!hasAction()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasTableName()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasEncodedRegionName()) {
        memoizedIsInitialized = 0;
        return false;
      }
      for (int i = 0; i < getStoreFlushesCount(); i++) {
        if (!getStoreFlushes(i).isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }
6873
    // Serializes the present fields in field-number order, then any unknown
    // fields preserved from parsing.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();  // primes the memoized size before writing
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeEnum(1, action_.getNumber());
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeBytes(2, tableName_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeBytes(3, encodedRegionName_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        output.writeUInt64(4, flushSequenceNumber_);
      }
      for (int i = 0; i < storeFlushes_.size(); i++) {
        output.writeMessage(5, storeFlushes_.get(i));
      }
      if (((bitField0_ & 0x00000010) == 0x00000010)) {
        output.writeBytes(6, regionName_);
      }
      getUnknownFields().writeTo(output);
    }
6897
    // Cached serialized size; -1 means not yet computed.
    private int memoizedSerializedSize = -1;

    // Computes (and caches) the total wire size of this message.
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeEnumSize(1, action_.getNumber());
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(2, tableName_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(3, encodedRegionName_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(4, flushSequenceNumber_);
      }
      for (int i = 0; i < storeFlushes_.size(); i++) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(5, storeFlushes_.get(i));
      }
      if (((bitField0_ & 0x00000010) == 0x00000010)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(6, regionName_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
6932
    private static final long serialVersionUID = 0L;
    // Java serialization replaces this instance with the proxy object chosen
    // by the GeneratedMessage superclass.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
6939
6940 @java.lang.Override
6941 public boolean equals(final java.lang.Object obj) {
6942 if (obj == this) {
6943 return true;
6944 }
6945 if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor)) {
6946 return super.equals(obj);
6947 }
6948 org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor other = (org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor) obj;
6949
6950 boolean result = true;
6951 result = result && (hasAction() == other.hasAction());
6952 if (hasAction()) {
6953 result = result &&
6954 (getAction() == other.getAction());
6955 }
6956 result = result && (hasTableName() == other.hasTableName());
6957 if (hasTableName()) {
6958 result = result && getTableName()
6959 .equals(other.getTableName());
6960 }
6961 result = result && (hasEncodedRegionName() == other.hasEncodedRegionName());
6962 if (hasEncodedRegionName()) {
6963 result = result && getEncodedRegionName()
6964 .equals(other.getEncodedRegionName());
6965 }
6966 result = result && (hasFlushSequenceNumber() == other.hasFlushSequenceNumber());
6967 if (hasFlushSequenceNumber()) {
6968 result = result && (getFlushSequenceNumber()
6969 == other.getFlushSequenceNumber());
6970 }
6971 result = result && getStoreFlushesList()
6972 .equals(other.getStoreFlushesList());
6973 result = result && (hasRegionName() == other.hasRegionName());
6974 if (hasRegionName()) {
6975 result = result && getRegionName()
6976 .equals(other.getRegionName());
6977 }
6978 result = result &&
6979 getUnknownFields().equals(other.getUnknownFields());
6980 return result;
6981 }
6982
    // Cached hash; 0 means not yet computed.
    private int memoizedHashCode = 0;

    // Hash over the descriptor, each present field (keyed by its field
    // number), and the unknown fields; memoized after the first call.
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasAction()) {
        hash = (37 * hash) + ACTION_FIELD_NUMBER;
        hash = (53 * hash) + hashEnum(getAction());
      }
      if (hasTableName()) {
        hash = (37 * hash) + TABLE_NAME_FIELD_NUMBER;
        hash = (53 * hash) + getTableName().hashCode();
      }
      if (hasEncodedRegionName()) {
        hash = (37 * hash) + ENCODED_REGION_NAME_FIELD_NUMBER;
        hash = (53 * hash) + getEncodedRegionName().hashCode();
      }
      if (hasFlushSequenceNumber()) {
        hash = (37 * hash) + FLUSH_SEQUENCE_NUMBER_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getFlushSequenceNumber());
      }
      if (getStoreFlushesCount() > 0) {
        hash = (37 * hash) + STORE_FLUSHES_FIELD_NUMBER;
        hash = (53 * hash) + getStoreFlushesList().hashCode();
      }
      if (hasRegionName()) {
        hash = (37 * hash) + REGION_NAME_FIELD_NUMBER;
        hash = (53 * hash) + getRegionName().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
7019
    // Static parse helpers. All delegate to PARSER and differ only in the
    // input source (ByteString, byte[], InputStream, CodedInputStream),
    // whether an ExtensionRegistry is supplied, and whether the stream is
    // length-delimited (parseDelimitedFrom).
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
7071 }
7072
// Builder factory methods for FlushDescriptor.
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
// Returns a fresh Builder pre-populated with the set fields of {@code prototype}.
public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor prototype) {
  return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }

@java.lang.Override
protected Builder newBuilderForType(
    com.google.protobuf.GeneratedMessage.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
7086
7087
7088
7089
7090
7091
7092
7093
/**
 * Builder for FlushDescriptor messages (protoc-generated builder pattern).
 *
 * Field presence is tracked in {@code bitField0_}, one bit per field in
 * declaration order: 0x01 action, 0x02 tableName, 0x04 encodedRegionName,
 * 0x08 flushSequenceNumber, 0x10 storeFlushes (list-is-mutable flag, since a
 * repeated field has no presence), 0x20 regionName. The repeated
 * store_flushes field is backed either by a plain list ({@code storeFlushes_})
 * or, once nested builders are requested, by {@code storeFlushesBuilder_};
 * exactly one of the two is active at any time.
 */
public static final class Builder extends
    com.google.protobuf.GeneratedMessage.Builder<Builder>
   implements org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptorOrBuilder {
  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_FlushDescriptor_descriptor;
  }

  protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_FlushDescriptor_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.class, org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.Builder.class);
  }

  // Construct via FlushDescriptor.newBuilder().
  private Builder() {
    maybeForceBuilderInitialization();
  }

  private Builder(
      com.google.protobuf.GeneratedMessage.BuilderParent parent) {
    super(parent);
    maybeForceBuilderInitialization();
  }
  private void maybeForceBuilderInitialization() {
    // When the runtime mandates nested builders, eagerly create the
    // repeated-field builder so the list-backed path is never taken.
    if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
      getStoreFlushesFieldBuilder();
    }
  }
  private static Builder create() {
    return new Builder();
  }

  /** Resets every field to its default value and clears all presence bits. */
  public Builder clear() {
    super.clear();
    action_ = org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.FlushAction.START_FLUSH;
    bitField0_ = (bitField0_ & ~0x00000001);
    tableName_ = com.google.protobuf.ByteString.EMPTY;
    bitField0_ = (bitField0_ & ~0x00000002);
    encodedRegionName_ = com.google.protobuf.ByteString.EMPTY;
    bitField0_ = (bitField0_ & ~0x00000004);
    flushSequenceNumber_ = 0L;
    bitField0_ = (bitField0_ & ~0x00000008);
    if (storeFlushesBuilder_ == null) {
      storeFlushes_ = java.util.Collections.emptyList();
      bitField0_ = (bitField0_ & ~0x00000010);
    } else {
      storeFlushesBuilder_.clear();
    }
    regionName_ = com.google.protobuf.ByteString.EMPTY;
    bitField0_ = (bitField0_ & ~0x00000020);
    return this;
  }

  public Builder clone() {
    return create().mergeFrom(buildPartial());
  }

  public com.google.protobuf.Descriptors.Descriptor
      getDescriptorForType() {
    return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_FlushDescriptor_descriptor;
  }

  public org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor getDefaultInstanceForType() {
    return org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.getDefaultInstance();
  }

  /**
   * Builds the message, throwing if any required field is unset
   * (per isInitialized()).
   */
  public org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor build() {
    org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor result = buildPartial();
    if (!result.isInitialized()) {
      throw newUninitializedMessageException(result);
    }
    return result;
  }

  /**
   * Builds the message without required-field checks. Note the bit remapping
   * at the end: builder bit 0x20 (regionName) becomes message bit 0x10,
   * because the repeated store_flushes field consumes builder bit 0x10 but
   * has no presence bit in the built message.
   */
  public org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor buildPartial() {
    org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor result = new org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor(this);
    int from_bitField0_ = bitField0_;
    int to_bitField0_ = 0;
    if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
      to_bitField0_ |= 0x00000001;
    }
    result.action_ = action_;
    if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
      to_bitField0_ |= 0x00000002;
    }
    result.tableName_ = tableName_;
    if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
      to_bitField0_ |= 0x00000004;
    }
    result.encodedRegionName_ = encodedRegionName_;
    if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
      to_bitField0_ |= 0x00000008;
    }
    result.flushSequenceNumber_ = flushSequenceNumber_;
    if (storeFlushesBuilder_ == null) {
      // Freeze the list into the message; the builder's mutable flag is
      // cleared so a later mutation forces a copy (ensureStoreFlushesIsMutable).
      if (((bitField0_ & 0x00000010) == 0x00000010)) {
        storeFlushes_ = java.util.Collections.unmodifiableList(storeFlushes_);
        bitField0_ = (bitField0_ & ~0x00000010);
      }
      result.storeFlushes_ = storeFlushes_;
    } else {
      result.storeFlushes_ = storeFlushesBuilder_.build();
    }
    if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
      to_bitField0_ |= 0x00000010;
    }
    result.regionName_ = regionName_;
    result.bitField0_ = to_bitField0_;
    onBuilt();
    return result;
  }

  public Builder mergeFrom(com.google.protobuf.Message other) {
    if (other instanceof org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor) {
      return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor)other);
    } else {
      super.mergeFrom(other);
      return this;
    }
  }

  /** Merges every set field of {@code other} into this builder. */
  public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor other) {
    if (other == org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.getDefaultInstance()) return this;
    if (other.hasAction()) {
      setAction(other.getAction());
    }
    if (other.hasTableName()) {
      setTableName(other.getTableName());
    }
    if (other.hasEncodedRegionName()) {
      setEncodedRegionName(other.getEncodedRegionName());
    }
    if (other.hasFlushSequenceNumber()) {
      setFlushSequenceNumber(other.getFlushSequenceNumber());
    }
    if (storeFlushesBuilder_ == null) {
      // List-backed path: if our list is still empty, adopt other's
      // (immutable) list directly; otherwise copy-on-write and append.
      if (!other.storeFlushes_.isEmpty()) {
        if (storeFlushes_.isEmpty()) {
          storeFlushes_ = other.storeFlushes_;
          bitField0_ = (bitField0_ & ~0x00000010);
        } else {
          ensureStoreFlushesIsMutable();
          storeFlushes_.addAll(other.storeFlushes_);
        }
        onChanged();
      }
    } else {
      // Builder-backed path: an empty field builder is dropped in favor of
      // adopting other's list, re-creating the builder only when the runtime
      // always uses field builders.
      if (!other.storeFlushes_.isEmpty()) {
        if (storeFlushesBuilder_.isEmpty()) {
          storeFlushesBuilder_.dispose();
          storeFlushesBuilder_ = null;
          storeFlushes_ = other.storeFlushes_;
          bitField0_ = (bitField0_ & ~0x00000010);
          storeFlushesBuilder_ =
            com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
               getStoreFlushesFieldBuilder() : null;
        } else {
          storeFlushesBuilder_.addAllMessages(other.storeFlushes_);
        }
      }
    }
    if (other.hasRegionName()) {
      setRegionName(other.getRegionName());
    }
    this.mergeUnknownFields(other.getUnknownFields());
    return this;
  }

  /**
   * True when action, tableName and encodedRegionName are set and every
   * nested storeFlushes entry is itself initialized.
   */
  public final boolean isInitialized() {
    if (!hasAction()) {
      return false;
    }
    if (!hasTableName()) {
      return false;
    }
    if (!hasEncodedRegionName()) {
      return false;
    }
    for (int i = 0; i < getStoreFlushesCount(); i++) {
      if (!getStoreFlushes(i).isInitialized()) {
        return false;
      }
    }
    return true;
  }

  /**
   * Parses from the stream and merges into this builder. On parse failure the
   * partially-parsed message (if any) is still merged in via the finally
   * block before the exception propagates.
   */
  public Builder mergeFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor parsedMessage = null;
    try {
      parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor) e.getUnfinishedMessage();
      throw e;
    } finally {
      if (parsedMessage != null) {
        mergeFrom(parsedMessage);
      }
    }
    return this;
  }
  private int bitField0_;

  // Enum field 'action' (default START_FLUSH); presence bit 0x00000001.
  // Required per isInitialized().
  private org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.FlushAction action_ = org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.FlushAction.START_FLUSH;
  public boolean hasAction() {
    return ((bitField0_ & 0x00000001) == 0x00000001);
  }
  public org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.FlushAction getAction() {
    return action_;
  }
  public Builder setAction(org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.FlushAction value) {
    if (value == null) {
      throw new NullPointerException();
    }
    bitField0_ |= 0x00000001;
    action_ = value;
    onChanged();
    return this;
  }
  public Builder clearAction() {
    bitField0_ = (bitField0_ & ~0x00000001);
    action_ = org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.FlushAction.START_FLUSH;
    onChanged();
    return this;
  }

  // Bytes field 'tableName'; presence bit 0x00000002. Required per isInitialized().
  private com.google.protobuf.ByteString tableName_ = com.google.protobuf.ByteString.EMPTY;
  public boolean hasTableName() {
    return ((bitField0_ & 0x00000002) == 0x00000002);
  }
  public com.google.protobuf.ByteString getTableName() {
    return tableName_;
  }
  public Builder setTableName(com.google.protobuf.ByteString value) {
    if (value == null) {
      throw new NullPointerException();
    }
    bitField0_ |= 0x00000002;
    tableName_ = value;
    onChanged();
    return this;
  }
  public Builder clearTableName() {
    bitField0_ = (bitField0_ & ~0x00000002);
    tableName_ = getDefaultInstance().getTableName();
    onChanged();
    return this;
  }

  // Bytes field 'encodedRegionName'; presence bit 0x00000004. Required per isInitialized().
  private com.google.protobuf.ByteString encodedRegionName_ = com.google.protobuf.ByteString.EMPTY;
  public boolean hasEncodedRegionName() {
    return ((bitField0_ & 0x00000004) == 0x00000004);
  }
  public com.google.protobuf.ByteString getEncodedRegionName() {
    return encodedRegionName_;
  }
  public Builder setEncodedRegionName(com.google.protobuf.ByteString value) {
    if (value == null) {
      throw new NullPointerException();
    }
    bitField0_ |= 0x00000004;
    encodedRegionName_ = value;
    onChanged();
    return this;
  }
  public Builder clearEncodedRegionName() {
    bitField0_ = (bitField0_ & ~0x00000004);
    encodedRegionName_ = getDefaultInstance().getEncodedRegionName();
    onChanged();
    return this;
  }

  // Long field 'flushSequenceNumber' (default 0); presence bit 0x00000008.
  private long flushSequenceNumber_ ;
  public boolean hasFlushSequenceNumber() {
    return ((bitField0_ & 0x00000008) == 0x00000008);
  }
  public long getFlushSequenceNumber() {
    return flushSequenceNumber_;
  }
  public Builder setFlushSequenceNumber(long value) {
    bitField0_ |= 0x00000008;
    flushSequenceNumber_ = value;
    onChanged();
    return this;
  }
  public Builder clearFlushSequenceNumber() {
    bitField0_ = (bitField0_ & ~0x00000008);
    flushSequenceNumber_ = 0L;
    onChanged();
    return this;
  }

  // Repeated message field 'storeFlushes'. Bit 0x00000010 marks the list as
  // privately mutable (not field presence).
  private java.util.List<org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor> storeFlushes_ =
    java.util.Collections.emptyList();
  // Copy-on-write: replace a shared/immutable list with a private ArrayList
  // before the first mutation.
  private void ensureStoreFlushesIsMutable() {
    if (!((bitField0_ & 0x00000010) == 0x00000010)) {
      storeFlushes_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor>(storeFlushes_);
      bitField0_ |= 0x00000010;
    }
  }

  private com.google.protobuf.RepeatedFieldBuilder<
      org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor, org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor.Builder, org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptorOrBuilder> storeFlushesBuilder_;

  public java.util.List<org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor> getStoreFlushesList() {
    if (storeFlushesBuilder_ == null) {
      return java.util.Collections.unmodifiableList(storeFlushes_);
    } else {
      return storeFlushesBuilder_.getMessageList();
    }
  }
  public int getStoreFlushesCount() {
    if (storeFlushesBuilder_ == null) {
      return storeFlushes_.size();
    } else {
      return storeFlushesBuilder_.getCount();
    }
  }
  public org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor getStoreFlushes(int index) {
    if (storeFlushesBuilder_ == null) {
      return storeFlushes_.get(index);
    } else {
      return storeFlushesBuilder_.getMessage(index);
    }
  }
  public Builder setStoreFlushes(
      int index, org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor value) {
    if (storeFlushesBuilder_ == null) {
      if (value == null) {
        throw new NullPointerException();
      }
      ensureStoreFlushesIsMutable();
      storeFlushes_.set(index, value);
      onChanged();
    } else {
      storeFlushesBuilder_.setMessage(index, value);
    }
    return this;
  }
  public Builder setStoreFlushes(
      int index, org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor.Builder builderForValue) {
    if (storeFlushesBuilder_ == null) {
      ensureStoreFlushesIsMutable();
      storeFlushes_.set(index, builderForValue.build());
      onChanged();
    } else {
      storeFlushesBuilder_.setMessage(index, builderForValue.build());
    }
    return this;
  }
  public Builder addStoreFlushes(org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor value) {
    if (storeFlushesBuilder_ == null) {
      if (value == null) {
        throw new NullPointerException();
      }
      ensureStoreFlushesIsMutable();
      storeFlushes_.add(value);
      onChanged();
    } else {
      storeFlushesBuilder_.addMessage(value);
    }
    return this;
  }
  public Builder addStoreFlushes(
      int index, org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor value) {
    if (storeFlushesBuilder_ == null) {
      if (value == null) {
        throw new NullPointerException();
      }
      ensureStoreFlushesIsMutable();
      storeFlushes_.add(index, value);
      onChanged();
    } else {
      storeFlushesBuilder_.addMessage(index, value);
    }
    return this;
  }
  public Builder addStoreFlushes(
      org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor.Builder builderForValue) {
    if (storeFlushesBuilder_ == null) {
      ensureStoreFlushesIsMutable();
      storeFlushes_.add(builderForValue.build());
      onChanged();
    } else {
      storeFlushesBuilder_.addMessage(builderForValue.build());
    }
    return this;
  }
  public Builder addStoreFlushes(
      int index, org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor.Builder builderForValue) {
    if (storeFlushesBuilder_ == null) {
      ensureStoreFlushesIsMutable();
      storeFlushes_.add(index, builderForValue.build());
      onChanged();
    } else {
      storeFlushesBuilder_.addMessage(index, builderForValue.build());
    }
    return this;
  }
  public Builder addAllStoreFlushes(
      java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor> values) {
    if (storeFlushesBuilder_ == null) {
      ensureStoreFlushesIsMutable();
      super.addAll(values, storeFlushes_);
      onChanged();
    } else {
      storeFlushesBuilder_.addAllMessages(values);
    }
    return this;
  }
  public Builder clearStoreFlushes() {
    if (storeFlushesBuilder_ == null) {
      storeFlushes_ = java.util.Collections.emptyList();
      bitField0_ = (bitField0_ & ~0x00000010);
      onChanged();
    } else {
      storeFlushesBuilder_.clear();
    }
    return this;
  }
  public Builder removeStoreFlushes(int index) {
    if (storeFlushesBuilder_ == null) {
      ensureStoreFlushesIsMutable();
      storeFlushes_.remove(index);
      onChanged();
    } else {
      storeFlushesBuilder_.remove(index);
    }
    return this;
  }
  // Requesting a nested builder forces the switch from the plain list to the
  // RepeatedFieldBuilder (see getStoreFlushesFieldBuilder()).
  public org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor.Builder getStoreFlushesBuilder(
      int index) {
    return getStoreFlushesFieldBuilder().getBuilder(index);
  }
  public org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptorOrBuilder getStoreFlushesOrBuilder(
      int index) {
    if (storeFlushesBuilder_ == null) {
      return storeFlushes_.get(index); } else {
      return storeFlushesBuilder_.getMessageOrBuilder(index);
    }
  }
  public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptorOrBuilder>
       getStoreFlushesOrBuilderList() {
    if (storeFlushesBuilder_ != null) {
      return storeFlushesBuilder_.getMessageOrBuilderList();
    } else {
      return java.util.Collections.unmodifiableList(storeFlushes_);
    }
  }
  public org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor.Builder addStoreFlushesBuilder() {
    return getStoreFlushesFieldBuilder().addBuilder(
        org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor.getDefaultInstance());
  }
  public org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor.Builder addStoreFlushesBuilder(
      int index) {
    return getStoreFlushesFieldBuilder().addBuilder(
        index, org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor.getDefaultInstance());
  }
  public java.util.List<org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor.Builder>
       getStoreFlushesBuilderList() {
    return getStoreFlushesFieldBuilder().getBuilderList();
  }
  // Lazily creates the RepeatedFieldBuilder, handing it the current list and
  // its mutability flag; the raw list reference is nulled afterwards so only
  // one representation is live.
  private com.google.protobuf.RepeatedFieldBuilder<
      org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor, org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor.Builder, org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptorOrBuilder>
      getStoreFlushesFieldBuilder() {
    if (storeFlushesBuilder_ == null) {
      storeFlushesBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor, org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor.Builder, org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptorOrBuilder>(
              storeFlushes_,
              ((bitField0_ & 0x00000010) == 0x00000010),
              getParentForChildren(),
              isClean());
      storeFlushes_ = null;
    }
    return storeFlushesBuilder_;
  }

  // Bytes field 'regionName'; builder presence bit 0x00000020 (remapped to
  // 0x00000010 in the built message — see buildPartial()).
  private com.google.protobuf.ByteString regionName_ = com.google.protobuf.ByteString.EMPTY;
  public boolean hasRegionName() {
    return ((bitField0_ & 0x00000020) == 0x00000020);
  }
  public com.google.protobuf.ByteString getRegionName() {
    return regionName_;
  }
  public Builder setRegionName(com.google.protobuf.ByteString value) {
    if (value == null) {
      throw new NullPointerException();
    }
    bitField0_ |= 0x00000020;
    regionName_ = value;
    onChanged();
    return this;
  }
  public Builder clearRegionName() {
    bitField0_ = (bitField0_ & ~0x00000020);
    regionName_ = getDefaultInstance().getRegionName();
    onChanged();
    return this;
  }
}
7740
// Eagerly create and default-initialize the shared default instance when
// the enclosing FlushDescriptor class is loaded.
static {
  defaultInstance = new FlushDescriptor(true);
  defaultInstance.initFields();
}
7745
7746
7747 }
7748
/**
 * Read-only accessor interface implemented by both {@code StoreDescriptor}
 * and its Builder. Field numbers below are taken from the StoreDescriptor
 * implementation's *_FIELD_NUMBER constants.
 */
public interface StoreDescriptorOrBuilder
    extends com.google.protobuf.MessageOrBuilder {

  // Bytes field family_name = 1 — treated as required by
  // StoreDescriptor.isInitialized().
  boolean hasFamilyName();
  com.google.protobuf.ByteString getFamilyName();

  // String field store_home_dir = 2 — treated as required by
  // StoreDescriptor.isInitialized().
  boolean hasStoreHomeDir();
  java.lang.String getStoreHomeDir();
  com.google.protobuf.ByteString
      getStoreHomeDirBytes();

  // Repeated string field store_file = 3.
  java.util.List<java.lang.String>
      getStoreFileList();
  int getStoreFileCount();
  java.lang.String getStoreFile(int index);
  com.google.protobuf.ByteString
      getStoreFileBytes(int index);
}
7825
7826
7827
7828 public static final class StoreDescriptor extends
7829 com.google.protobuf.GeneratedMessage
7830 implements StoreDescriptorOrBuilder {
7831
private StoreDescriptor(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
  super(builder);
  this.unknownFields = builder.getUnknownFields();
}
// noInit constructor: used only for the shared default instance (fields are
// populated afterwards by initFields() in the static initializer).
private StoreDescriptor(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

private static final StoreDescriptor defaultInstance;
public static StoreDescriptor getDefaultInstance() {
  return defaultInstance;
}

public StoreDescriptor getDefaultInstanceForType() {
  return defaultInstance;
}

// Fields seen on the wire that this class does not recognize; preserved for
// round-tripping.
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
    getUnknownFields() {
  return this.unknownFields;
}
/**
 * Wire-parsing constructor: reads tag/value pairs until end of input (tag 0)
 * or an unparseable unknown field. Unrecognized fields are accumulated into
 * {@code unknownFields}; the finally block freezes the repeated store_file
 * list and the unknown-field set even on failure.
 */
private StoreDescriptor(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  initFields();
  int mutable_bitField0_ = 0;
  com.google.protobuf.UnknownFieldSet.Builder unknownFields =
      com.google.protobuf.UnknownFieldSet.newBuilder();
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      // NOTE: the default label precedes the value cases here; Java switch
      // dispatch matches on value regardless of label order, so behavior is
      // identical to the conventional default-last layout.
      switch (tag) {
        case 0:
          done = true;
          break;
        default: {
          if (!parseUnknownField(input, unknownFields,
                                 extensionRegistry, tag)) {
            done = true;
          }
          break;
        }
        case 10: {  // field 1 (family_name), length-delimited
          bitField0_ |= 0x00000001;
          familyName_ = input.readBytes();
          break;
        }
        case 18: {  // field 2 (store_home_dir), length-delimited
          bitField0_ |= 0x00000002;
          storeHomeDir_ = input.readBytes();
          break;
        }
        case 26: {  // field 3 (store_file), repeated; lazily allocate the list
          if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) {
            storeFile_ = new com.google.protobuf.LazyStringArrayList();
            mutable_bitField0_ |= 0x00000004;
          }
          storeFile_.add(input.readBytes());
          break;
        }
      }
    }
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    throw e.setUnfinishedMessage(this);
  } catch (java.io.IOException e) {
    throw new com.google.protobuf.InvalidProtocolBufferException(
        e.getMessage()).setUnfinishedMessage(this);
  } finally {
    if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) {
      storeFile_ = new com.google.protobuf.UnmodifiableLazyStringList(storeFile_);
    }
    this.unknownFields = unknownFields.build();
    makeExtensionsImmutable();
  }
}
public static final com.google.protobuf.Descriptors.Descriptor
    getDescriptor() {
  return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_StoreDescriptor_descriptor;
}

protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_StoreDescriptor_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.class, org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.Builder.class);
}

// Shared parser; each call delegates to the wire-parsing constructor above.
public static com.google.protobuf.Parser<StoreDescriptor> PARSER =
    new com.google.protobuf.AbstractParser<StoreDescriptor>() {
  public StoreDescriptor parsePartialFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return new StoreDescriptor(input, extensionRegistry);
  }
};

@java.lang.Override
public com.google.protobuf.Parser<StoreDescriptor> getParserForType() {
  return PARSER;
}
7935
// Presence bits: 0x01 familyName, 0x02 storeHomeDir (repeated storeFile has
// no presence bit).
private int bitField0_;

// Bytes field family_name = 1; required per isInitialized().
public static final int FAMILY_NAME_FIELD_NUMBER = 1;
private com.google.protobuf.ByteString familyName_;
public boolean hasFamilyName() {
  return ((bitField0_ & 0x00000001) == 0x00000001);
}
public com.google.protobuf.ByteString getFamilyName() {
  return familyName_;
}

// String field store_home_dir = 2; required per isInitialized(). Stored as
// Object so it can hold either a String or its ByteString wire form.
public static final int STORE_HOME_DIR_FIELD_NUMBER = 2;
private java.lang.Object storeHomeDir_;
public boolean hasStoreHomeDir() {
  return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
 * Returns store_home_dir as a String. If the field currently holds a
 * ByteString it is decoded as UTF-8, and — only when the bytes are valid
 * UTF-8 — the decoded String is cached back into the field.
 */
public java.lang.String getStoreHomeDir() {
  java.lang.Object ref = storeHomeDir_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    com.google.protobuf.ByteString bs =
        (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    if (bs.isValidUtf8()) {
      storeHomeDir_ = s;
    }
    return s;
  }
}
/** Returns store_home_dir as UTF-8 bytes, caching the ByteString form. */
public com.google.protobuf.ByteString
    getStoreHomeDirBytes() {
  java.lang.Object ref = storeHomeDir_;
  if (ref instanceof java.lang.String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8(
            (java.lang.String) ref);
    storeHomeDir_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}

// Repeated string field store_file = 3; immutable after construction (the
// parse constructor wraps it in an UnmodifiableLazyStringList).
public static final int STORE_FILE_FIELD_NUMBER = 3;
private com.google.protobuf.LazyStringList storeFile_;
public java.util.List<java.lang.String>
    getStoreFileList() {
  return storeFile_;
}
public int getStoreFileCount() {
  return storeFile_.size();
}
public java.lang.String getStoreFile(int index) {
  return storeFile_.get(index);
}
public com.google.protobuf.ByteString
    getStoreFileBytes(int index) {
  return storeFile_.getByteString(index);
}
8053
// Sets every field to its default value (called by both constructors).
private void initFields() {
  familyName_ = com.google.protobuf.ByteString.EMPTY;
  storeHomeDir_ = "";
  storeFile_ = com.google.protobuf.LazyStringArrayList.EMPTY;
}
// Memoized tri-state: -1 unknown, 0 not initialized, 1 initialized.
private byte memoizedIsInitialized = -1;
/** True when the required family_name and store_home_dir fields are set. */
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized != -1) return isInitialized == 1;

  if (!hasFamilyName()) {
    memoizedIsInitialized = 0;
    return false;
  }
  if (!hasStoreHomeDir()) {
    memoizedIsInitialized = 0;
    return false;
  }
  memoizedIsInitialized = 1;
  return true;
}
8075
/** Serializes set fields in field-number order, then any unknown fields. */
public void writeTo(com.google.protobuf.CodedOutputStream output)
    throws java.io.IOException {
  getSerializedSize();  // side effect: populates memoizedSerializedSize
  if (((bitField0_ & 0x00000001) == 0x00000001)) {
    output.writeBytes(1, familyName_);
  }
  if (((bitField0_ & 0x00000002) == 0x00000002)) {
    output.writeBytes(2, getStoreHomeDirBytes());
  }
  for (int i = 0; i < storeFile_.size(); i++) {
    output.writeBytes(3, storeFile_.getByteString(i));
  }
  getUnknownFields().writeTo(output);
}

private int memoizedSerializedSize = -1;
/** Computes (and memoizes) the serialized byte size of this message. */
public int getSerializedSize() {
  int size = memoizedSerializedSize;
  if (size != -1) return size;

  size = 0;
  if (((bitField0_ & 0x00000001) == 0x00000001)) {
    size += com.google.protobuf.CodedOutputStream
      .computeBytesSize(1, familyName_);
  }
  if (((bitField0_ & 0x00000002) == 0x00000002)) {
    size += com.google.protobuf.CodedOutputStream
      .computeBytesSize(2, getStoreHomeDirBytes());
  }
  {
    int dataSize = 0;
    for (int i = 0; i < storeFile_.size(); i++) {
      dataSize += com.google.protobuf.CodedOutputStream
        .computeBytesSizeNoTag(storeFile_.getByteString(i));
    }
    size += dataSize;
    size += 1 * getStoreFileList().size();  // per-element tag overhead for field 3
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSerializedSize = size;
  return size;
}
8118
private static final long serialVersionUID = 0L;
// Java-serialization hook; defers entirely to the superclass implementation.
@java.lang.Override
protected java.lang.Object writeReplace()
    throws java.io.ObjectStreamException {
  return super.writeReplace();
}
8125
/**
 * Field-by-field equality: matching presence and value for family_name and
 * store_home_dir, equal store_file lists, and equal unknown fields.
 */
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
   return true;
  }
  if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor)) {
    return super.equals(obj);
  }
  org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor other = (org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor) obj;

  boolean result = true;
  result = result && (hasFamilyName() == other.hasFamilyName());
  if (hasFamilyName()) {
    result = result && getFamilyName()
        .equals(other.getFamilyName());
  }
  result = result && (hasStoreHomeDir() == other.hasStoreHomeDir());
  if (hasStoreHomeDir()) {
    result = result && getStoreHomeDir()
        .equals(other.getStoreHomeDir());
  }
  result = result && getStoreFileList()
      .equals(other.getStoreFileList());
  result = result &&
      getUnknownFields().equals(other.getUnknownFields());
  return result;
}

// Memoized hash; 0 doubles as the "not yet computed" sentinel.
private int memoizedHashCode = 0;
/** Hash over the descriptor, each set field (keyed by field number), and unknown fields. */
@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptorForType().hashCode();
  if (hasFamilyName()) {
    hash = (37 * hash) + FAMILY_NAME_FIELD_NUMBER;
    hash = (53 * hash) + getFamilyName().hashCode();
  }
  if (hasStoreHomeDir()) {
    hash = (37 * hash) + STORE_HOME_DIR_FIELD_NUMBER;
    hash = (53 * hash) + getStoreHomeDir().hashCode();
  }
  if (getStoreFileCount() > 0) {
    hash = (37 * hash) + STORE_FILE_FIELD_NUMBER;
    hash = (53 * hash) + getStoreFileList().hashCode();
  }
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
8178
    // Standard generated parse entry points; all delegate to PARSER. The
    // ByteString/byte[] overloads throw InvalidProtocolBufferException on
    // malformed input; the stream overloads additionally surface IOException.
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    // Delimited variants read a varint length prefix before the message body.
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
8231
    // Builder factories: newBuilder() creates an empty builder; the prototype
    // overload pre-populates it; toBuilder() copies this instance's fields.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    // Parent-aware builder used internally when this message is nested inside
    // another builder (propagates onChanged() notifications upward).
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
8245
8246
8247
    /**
     * Builder for {@code StoreDescriptor}. Generated by the protocol buffer
     * compiler; field presence is tracked in {@code bitField0_} (bit 0x1 =
     * family_name, 0x2 = store_home_dir, 0x4 = store_file list is mutable).
     * Do not hand-edit — regenerate from the .proto instead.
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptorOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_StoreDescriptor_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_StoreDescriptor_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.class, org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        // No message-typed fields in this message, so nothing to pre-build.
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }

      /** Resets every field to its default and clears all presence bits. */
      public Builder clear() {
        super.clear();
        familyName_ = com.google.protobuf.ByteString.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000001);
        storeHomeDir_ = "";
        bitField0_ = (bitField0_ & ~0x00000002);
        storeFile_ = com.google.protobuf.LazyStringArrayList.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000004);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_StoreDescriptor_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.getDefaultInstance();
      }

      /**
       * Builds the message; throws UninitializedMessageException if a required
       * field (family_name, store_home_dir) is unset.
       */
      public org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor build() {
        org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      /** Builds without checking required fields; copies presence bits across. */
      public org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor result = new org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.familyName_ = familyName_;
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.storeHomeDir_ = storeHomeDir_;
        // Freeze the repeated list: wrap once, drop the mutability bit so later
        // builder mutations copy-on-write instead of touching the built message.
        if (((bitField0_ & 0x00000004) == 0x00000004)) {
          storeFile_ = new com.google.protobuf.UnmodifiableLazyStringList(
              storeFile_);
          bitField0_ = (bitField0_ & ~0x00000004);
        }
        result.storeFile_ = storeFile_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      /** Field-wise merge: set fields in {@code other} overwrite; lists append. */
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.getDefaultInstance()) return this;
        if (other.hasFamilyName()) {
          setFamilyName(other.getFamilyName());
        }
        if (other.hasStoreHomeDir()) {
          bitField0_ |= 0x00000002;
          storeHomeDir_ = other.storeHomeDir_;
          onChanged();
        }
        if (!other.storeFile_.isEmpty()) {
          if (storeFile_.isEmpty()) {
            // Adopt the other message's (immutable) list; mark as not-owned.
            storeFile_ = other.storeFile_;
            bitField0_ = (bitField0_ & ~0x00000004);
          } else {
            ensureStoreFileIsMutable();
            storeFile_.addAll(other.storeFile_);
          }
          onChanged();
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      /** True when both required fields are set. */
      public final boolean isInitialized() {
        if (!hasFamilyName()) {
          // required family_name missing
          return false;
        }
        if (!hasStoreHomeDir()) {
          // required store_home_dir missing
          return false;
        }
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          // Keep whatever parsed before the failure so callers can inspect it.
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;

      // required bytes family_name = 1;
      private com.google.protobuf.ByteString familyName_ = com.google.protobuf.ByteString.EMPTY;

      public boolean hasFamilyName() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }

      public com.google.protobuf.ByteString getFamilyName() {
        return familyName_;
      }

      public Builder setFamilyName(com.google.protobuf.ByteString value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000001;
        familyName_ = value;
        onChanged();
        return this;
      }

      public Builder clearFamilyName() {
        bitField0_ = (bitField0_ & ~0x00000001);
        familyName_ = getDefaultInstance().getFamilyName();
        onChanged();
        return this;
      }

      // required string store_home_dir = 2; stored as String or ByteString and
      // converted lazily in whichever direction is requested.
      private java.lang.Object storeHomeDir_ = "";

      public boolean hasStoreHomeDir() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }

      public java.lang.String getStoreHomeDir() {
        java.lang.Object ref = storeHomeDir_;
        if (!(ref instanceof java.lang.String)) {
          // Decode once and cache the String form.
          java.lang.String s = ((com.google.protobuf.ByteString) ref)
              .toStringUtf8();
          storeHomeDir_ = s;
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }

      public com.google.protobuf.ByteString
          getStoreHomeDirBytes() {
        java.lang.Object ref = storeHomeDir_;
        if (ref instanceof String) {
          // Encode once and cache the ByteString form.
          com.google.protobuf.ByteString b =
              com.google.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          storeHomeDir_ = b;
          return b;
        } else {
          return (com.google.protobuf.ByteString) ref;
        }
      }

      public Builder setStoreHomeDir(
          java.lang.String value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000002;
        storeHomeDir_ = value;
        onChanged();
        return this;
      }

      public Builder clearStoreHomeDir() {
        bitField0_ = (bitField0_ & ~0x00000002);
        storeHomeDir_ = getDefaultInstance().getStoreHomeDir();
        onChanged();
        return this;
      }

      public Builder setStoreHomeDirBytes(
          com.google.protobuf.ByteString value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000002;
        storeHomeDir_ = value;
        onChanged();
        return this;
      }

      // repeated string store_file = 3; bit 0x4 means this builder owns a
      // mutable copy of the list.
      private com.google.protobuf.LazyStringList storeFile_ = com.google.protobuf.LazyStringArrayList.EMPTY;
      private void ensureStoreFileIsMutable() {
        if (!((bitField0_ & 0x00000004) == 0x00000004)) {
          storeFile_ = new com.google.protobuf.LazyStringArrayList(storeFile_);
          bitField0_ |= 0x00000004;
        }
      }

      public java.util.List<java.lang.String>
          getStoreFileList() {
        return java.util.Collections.unmodifiableList(storeFile_);
      }

      public int getStoreFileCount() {
        return storeFile_.size();
      }

      public java.lang.String getStoreFile(int index) {
        return storeFile_.get(index);
      }

      public com.google.protobuf.ByteString
          getStoreFileBytes(int index) {
        return storeFile_.getByteString(index);
      }

      public Builder setStoreFile(
          int index, java.lang.String value) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureStoreFileIsMutable();
        storeFile_.set(index, value);
        onChanged();
        return this;
      }

      public Builder addStoreFile(
          java.lang.String value) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureStoreFileIsMutable();
        storeFile_.add(value);
        onChanged();
        return this;
      }

      public Builder addAllStoreFile(
          java.lang.Iterable<java.lang.String> values) {
        ensureStoreFileIsMutable();
        super.addAll(values, storeFile_);
        onChanged();
        return this;
      }

      public Builder clearStoreFile() {
        storeFile_ = com.google.protobuf.LazyStringArrayList.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000004);
        onChanged();
        return this;
      }

      public Builder addStoreFileBytes(
          com.google.protobuf.ByteString value) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureStoreFileIsMutable();
        storeFile_.add(value);
        onChanged();
        return this;
      }

    }
8665
    // Eagerly create the singleton default instance with all fields at their
    // proto defaults.
    static {
      defaultInstance = new StoreDescriptor(true);
      defaultInstance.initFields();
    }
8670
8671
8672 }
8673
  /**
   * Read-side contract shared by {@code BulkLoadDescriptor} and its Builder.
   * Generated by the protocol buffer compiler from the BulkLoadDescriptor
   * message definition.
   */
  public interface BulkLoadDescriptorOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required .TableName table_name = 1;
    /** Whether the required {@code table_name} field is set. */
    boolean hasTableName();

    /** The table the bulk load targets. */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName();

    /** Message-or-builder view of {@code table_name}. */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder();

    // required bytes encoded_region_name = 2;
    /** Whether the required {@code encoded_region_name} field is set. */
    boolean hasEncodedRegionName();

    /** Encoded name of the region that received the bulk load. */
    com.google.protobuf.ByteString getEncodedRegionName();

    // repeated .StoreDescriptor stores = 3;
    /** All per-store descriptors recorded for this bulk load. */
    java.util.List<org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor>
        getStoresList();

    /** The store descriptor at {@code index}. */
    org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor getStores(int index);

    /** Number of {@code stores} entries. */
    int getStoresCount();

    /** Message-or-builder view of the {@code stores} list. */
    java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptorOrBuilder>
        getStoresOrBuilderList();

    /** Message-or-builder view of the {@code stores} entry at {@code index}. */
    org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptorOrBuilder getStoresOrBuilder(
        int index);

    // required int64 bulkload_seq_num = 4;
    /** Whether the required {@code bulkload_seq_num} field is set. */
    boolean hasBulkloadSeqNum();

    /** Sequence number associated with the bulk load. */
    long getBulkloadSeqNum();
  }
8736
8737
8738
8739
8740
8741
8742
8743
8744 public static final class BulkLoadDescriptor extends
8745 com.google.protobuf.GeneratedMessage
8746 implements BulkLoadDescriptorOrBuilder {
8747
    // Used by Builder.buildPartial(); adopts the builder's unknown fields.
    private BulkLoadDescriptor(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Used only to construct the shared default instance (noInit is a marker).
    private BulkLoadDescriptor(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    // Singleton default instance, populated in the class's static initializer.
    private static final BulkLoadDescriptor defaultInstance;
    public static BulkLoadDescriptor getDefaultInstance() {
      return defaultInstance;
    }

    public BulkLoadDescriptor getDefaultInstanceForType() {
      return defaultInstance;
    }

    // Fields that arrived on the wire but are not in this message's schema.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    /**
     * Wire-format parsing constructor, invoked via PARSER. Reads tag/value
     * pairs until EOF (tag 0); unrecognized tags are preserved in the unknown
     * field set. Tags: 10 = table_name (message), 18 = encoded_region_name
     * (bytes), 26 = stores (repeated message), 32 = bulkload_seq_num (int64).
     */
    private BulkLoadDescriptor(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          // NOTE: the default arm appears before the numbered cases; Java
          // switch dispatch is unordered and every arm breaks, so behavior is
          // identical to the conventional ordering.
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder subBuilder = null;
              // If table_name was already seen, merge the new occurrence into it
              // (last-wins per-field semantics for message fields).
              if (((bitField0_ & 0x00000001) == 0x00000001)) {
                subBuilder = tableName_.toBuilder();
              }
              tableName_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(tableName_);
                tableName_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000001;
              break;
            }
            case 18: {
              bitField0_ |= 0x00000002;
              encodedRegionName_ = input.readBytes();
              break;
            }
            case 26: {
              // Lazily allocate the repeated-field list on first occurrence.
              if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) {
                stores_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor>();
                mutable_bitField0_ |= 0x00000004;
              }
              stores_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.PARSER, extensionRegistry));
              break;
            }
            case 32: {
              bitField0_ |= 0x00000004;
              bulkloadSeqNum_ = input.readInt64();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Seal the repeated list and unknown fields even on failure, so the
        // partially-parsed message attached to the exception is consistent.
        if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) {
          stores_ = java.util.Collections.unmodifiableList(stores_);
        }
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    // Reflection support: descriptor and field-accessor table for this message.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_BulkLoadDescriptor_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_BulkLoadDescriptor_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor.class, org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor.Builder.class);
    }
8849
    // Parser singleton; delegates to the wire-format parsing constructor.
    public static com.google.protobuf.Parser<BulkLoadDescriptor> PARSER =
        new com.google.protobuf.AbstractParser<BulkLoadDescriptor>() {
      public BulkLoadDescriptor parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new BulkLoadDescriptor(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<BulkLoadDescriptor> getParserForType() {
      return PARSER;
    }
8864
    // Presence bits: 0x1 = table_name, 0x2 = encoded_region_name,
    // 0x4 = bulkload_seq_num (repeated stores has no presence bit).
    private int bitField0_;

    // required .TableName table_name = 1;
    public static final int TABLE_NAME_FIELD_NUMBER = 1;
    private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_;

    public boolean hasTableName() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }

    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() {
      return tableName_;
    }

    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
      return tableName_;
    }

    // required bytes encoded_region_name = 2;
    public static final int ENCODED_REGION_NAME_FIELD_NUMBER = 2;
    private com.google.protobuf.ByteString encodedRegionName_;

    public boolean hasEncodedRegionName() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }

    public com.google.protobuf.ByteString getEncodedRegionName() {
      return encodedRegionName_;
    }

    // repeated .StoreDescriptor stores = 3;
    public static final int STORES_FIELD_NUMBER = 3;
    private java.util.List<org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor> stores_;

    public java.util.List<org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor> getStoresList() {
      return stores_;
    }

    public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptorOrBuilder>
        getStoresOrBuilderList() {
      return stores_;
    }

    public int getStoresCount() {
      return stores_.size();
    }

    public org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor getStores(int index) {
      return stores_.get(index);
    }

    public org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptorOrBuilder getStoresOrBuilder(
        int index) {
      return stores_.get(index);
    }

    // required int64 bulkload_seq_num = 4;
    public static final int BULKLOAD_SEQ_NUM_FIELD_NUMBER = 4;
    private long bulkloadSeqNum_;

    public boolean hasBulkloadSeqNum() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }

    public long getBulkloadSeqNum() {
      return bulkloadSeqNum_;
    }
8955
    // Sets every field to its proto default before parsing/construction.
    private void initFields() {
      tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
      encodedRegionName_ = com.google.protobuf.ByteString.EMPTY;
      stores_ = java.util.Collections.emptyList();
      bulkloadSeqNum_ = 0L;
    }
    // -1 = unknown, 0 = not initialized, 1 = initialized.
    private byte memoizedIsInitialized = -1;
    /**
     * True when all required fields (table_name, encoded_region_name,
     * bulkload_seq_num) are set and every nested message is itself initialized.
     */
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      if (!hasTableName()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasEncodedRegionName()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasBulkloadSeqNum()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!getTableName().isInitialized()) {
        memoizedIsInitialized = 0;
        return false;
      }
      for (int i = 0; i < getStoresCount(); i++) {
        if (!getStores(i).isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }
8992
    /**
     * Serializes set fields in field-number order: table_name (1),
     * encoded_region_name (2), stores (3, repeated), bulkload_seq_num (4),
     * then any unknown fields.
     */
    public void writeTo(com.google.protobuf.CodedOutputStream output)
        throws java.io.IOException {
      // Populates memoizedSerializedSize, which nested-message writing relies on.
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeMessage(1, tableName_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeBytes(2, encodedRegionName_);
      }
      for (int i = 0; i < stores_.size(); i++) {
        output.writeMessage(3, stores_.get(i));
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeInt64(4, bulkloadSeqNum_);
      }
      getUnknownFields().writeTo(output);
    }

    // -1 = not yet computed.
    private int memoizedSerializedSize = -1;
    /** Computes (and caches) the serialized byte size of this message. */
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, tableName_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(2, encodedRegionName_);
      }
      for (int i = 0; i < stores_.size(); i++) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(3, stores_.get(i));
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeInt64Size(4, bulkloadSeqNum_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
9037
    private static final long serialVersionUID = 0L;
    // Java serialization is delegated to GeneratedMessage.writeReplace().
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
9044
    /**
     * Value equality: presence bits must match, present fields must compare
     * equal, the repeated stores list must be equal, and unknown fields match.
     */
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor other = (org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor) obj;

      boolean result = true;
      result = result && (hasTableName() == other.hasTableName());
      if (hasTableName()) {
        result = result && getTableName()
            .equals(other.getTableName());
      }
      result = result && (hasEncodedRegionName() == other.hasEncodedRegionName());
      if (hasEncodedRegionName()) {
        result = result && getEncodedRegionName()
            .equals(other.getEncodedRegionName());
      }
      result = result && getStoresList()
          .equals(other.getStoresList());
      result = result && (hasBulkloadSeqNum() == other.hasBulkloadSeqNum());
      if (hasBulkloadSeqNum()) {
        result = result && (getBulkloadSeqNum()
            == other.getBulkloadSeqNum());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
9077
    // Cached hash; 0 means "not yet computed".
    private int memoizedHashCode = 0;
    /** Hash consistent with equals(); mixes present fields by field number. */
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasTableName()) {
        hash = (37 * hash) + TABLE_NAME_FIELD_NUMBER;
        hash = (53 * hash) + getTableName().hashCode();
      }
      if (hasEncodedRegionName()) {
        hash = (37 * hash) + ENCODED_REGION_NAME_FIELD_NUMBER;
        hash = (53 * hash) + getEncodedRegionName().hashCode();
      }
      if (getStoresCount() > 0) {
        hash = (37 * hash) + STORES_FIELD_NUMBER;
        hash = (53 * hash) + getStoresList().hashCode();
      }
      if (hasBulkloadSeqNum()) {
        hash = (37 * hash) + BULKLOAD_SEQ_NUM_FIELD_NUMBER;
        // hashLong folds the 64-bit value into 32 bits.
        hash = (53 * hash) + hashLong(getBulkloadSeqNum());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
9106
    // Standard generated parse entry points; all delegate to PARSER.
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    // Delimited variants read a varint length prefix before the message body.
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
9159
    // Builder factories: empty builder, prototype-populated builder, and a
    // copy-of-this builder.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    // Parent-aware builder used when nested inside another builder.
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
9173
9174
9175
9176
9177
9178
9179
9180
9181 public static final class Builder extends
9182 com.google.protobuf.GeneratedMessage.Builder<Builder>
9183 implements org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptorOrBuilder {
      // Reflection support for the Builder: descriptor and accessor table.
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_BulkLoadDescriptor_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_BulkLoadDescriptor_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor.class, org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor.Builder.class);
      }

      // Construct using BulkLoadDescriptor.newBuilder().
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        // Pre-create nested field builders for the message-typed fields so
        // change notifications propagate when field builders are always used.
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getTableNameFieldBuilder();
          getStoresFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }
9215
      /** Resets all fields to defaults and clears the presence bits (0x1 =
       *  table_name, 0x2 = encoded_region_name, 0x4 = stores, 0x8 =
       *  bulkload_seq_num). */
      public Builder clear() {
        super.clear();
        if (tableNameBuilder_ == null) {
          tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
        } else {
          tableNameBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        encodedRegionName_ = com.google.protobuf.ByteString.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000002);
        if (storesBuilder_ == null) {
          stores_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000004);
        } else {
          storesBuilder_.clear();
        }
        bulkloadSeqNum_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000008);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_BulkLoadDescriptor_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor.getDefaultInstance();
      }

      /** Builds the message; throws if a required field is unset. */
      public org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor build() {
        org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
9257
9258 public org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor buildPartial() {
9259 org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor result = new org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor(this);
9260 int from_bitField0_ = bitField0_;
9261 int to_bitField0_ = 0;
9262 if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
9263 to_bitField0_ |= 0x00000001;
9264 }
9265 if (tableNameBuilder_ == null) {
9266 result.tableName_ = tableName_;
9267 } else {
9268 result.tableName_ = tableNameBuilder_.build();
9269 }
9270 if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
9271 to_bitField0_ |= 0x00000002;
9272 }
9273 result.encodedRegionName_ = encodedRegionName_;
9274 if (storesBuilder_ == null) {
9275 if (((bitField0_ & 0x00000004) == 0x00000004)) {
9276 stores_ = java.util.Collections.unmodifiableList(stores_);
9277 bitField0_ = (bitField0_ & ~0x00000004);
9278 }
9279 result.stores_ = stores_;
9280 } else {
9281 result.stores_ = storesBuilder_.build();
9282 }
9283 if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
9284 to_bitField0_ |= 0x00000004;
9285 }
9286 result.bulkloadSeqNum_ = bulkloadSeqNum_;
9287 result.bitField0_ = to_bitField0_;
9288 onBuilt();
9289 return result;
9290 }
9291
9292 public Builder mergeFrom(com.google.protobuf.Message other) {
9293 if (other instanceof org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor) {
9294 return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor)other);
9295 } else {
9296 super.mergeFrom(other);
9297 return this;
9298 }
9299 }
9300
9301 public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor other) {
9302 if (other == org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor.getDefaultInstance()) return this;
9303 if (other.hasTableName()) {
9304 mergeTableName(other.getTableName());
9305 }
9306 if (other.hasEncodedRegionName()) {
9307 setEncodedRegionName(other.getEncodedRegionName());
9308 }
9309 if (storesBuilder_ == null) {
9310 if (!other.stores_.isEmpty()) {
9311 if (stores_.isEmpty()) {
9312 stores_ = other.stores_;
9313 bitField0_ = (bitField0_ & ~0x00000004);
9314 } else {
9315 ensureStoresIsMutable();
9316 stores_.addAll(other.stores_);
9317 }
9318 onChanged();
9319 }
9320 } else {
9321 if (!other.stores_.isEmpty()) {
9322 if (storesBuilder_.isEmpty()) {
9323 storesBuilder_.dispose();
9324 storesBuilder_ = null;
9325 stores_ = other.stores_;
9326 bitField0_ = (bitField0_ & ~0x00000004);
9327 storesBuilder_ =
9328 com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
9329 getStoresFieldBuilder() : null;
9330 } else {
9331 storesBuilder_.addAllMessages(other.stores_);
9332 }
9333 }
9334 }
9335 if (other.hasBulkloadSeqNum()) {
9336 setBulkloadSeqNum(other.getBulkloadSeqNum());
9337 }
9338 this.mergeUnknownFields(other.getUnknownFields());
9339 return this;
9340 }
9341
9342 public final boolean isInitialized() {
9343 if (!hasTableName()) {
9344
9345 return false;
9346 }
9347 if (!hasEncodedRegionName()) {
9348
9349 return false;
9350 }
9351 if (!hasBulkloadSeqNum()) {
9352
9353 return false;
9354 }
9355 if (!getTableName().isInitialized()) {
9356
9357 return false;
9358 }
9359 for (int i = 0; i < getStoresCount(); i++) {
9360 if (!getStores(i).isInitialized()) {
9361
9362 return false;
9363 }
9364 }
9365 return true;
9366 }
9367
9368 public Builder mergeFrom(
9369 com.google.protobuf.CodedInputStream input,
9370 com.google.protobuf.ExtensionRegistryLite extensionRegistry)
9371 throws java.io.IOException {
9372 org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor parsedMessage = null;
9373 try {
9374 parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
9375 } catch (com.google.protobuf.InvalidProtocolBufferException e) {
9376 parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor) e.getUnfinishedMessage();
9377 throw e;
9378 } finally {
9379 if (parsedMessage != null) {
9380 mergeFrom(parsedMessage);
9381 }
9382 }
9383 return this;
9384 }
9385 private int bitField0_;
9386
9387
9388 private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
9389 private com.google.protobuf.SingleFieldBuilder<
9390 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_;
9391
9392
9393
9394 public boolean hasTableName() {
9395 return ((bitField0_ & 0x00000001) == 0x00000001);
9396 }
9397
9398
9399
9400 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() {
9401 if (tableNameBuilder_ == null) {
9402 return tableName_;
9403 } else {
9404 return tableNameBuilder_.getMessage();
9405 }
9406 }
9407
9408
9409
9410 public Builder setTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
9411 if (tableNameBuilder_ == null) {
9412 if (value == null) {
9413 throw new NullPointerException();
9414 }
9415 tableName_ = value;
9416 onChanged();
9417 } else {
9418 tableNameBuilder_.setMessage(value);
9419 }
9420 bitField0_ |= 0x00000001;
9421 return this;
9422 }
9423
9424
9425
9426 public Builder setTableName(
9427 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) {
9428 if (tableNameBuilder_ == null) {
9429 tableName_ = builderForValue.build();
9430 onChanged();
9431 } else {
9432 tableNameBuilder_.setMessage(builderForValue.build());
9433 }
9434 bitField0_ |= 0x00000001;
9435 return this;
9436 }
9437
9438
9439
9440 public Builder mergeTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
9441 if (tableNameBuilder_ == null) {
9442 if (((bitField0_ & 0x00000001) == 0x00000001) &&
9443 tableName_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) {
9444 tableName_ =
9445 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial();
9446 } else {
9447 tableName_ = value;
9448 }
9449 onChanged();
9450 } else {
9451 tableNameBuilder_.mergeFrom(value);
9452 }
9453 bitField0_ |= 0x00000001;
9454 return this;
9455 }
9456
9457
9458
9459 public Builder clearTableName() {
9460 if (tableNameBuilder_ == null) {
9461 tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
9462 onChanged();
9463 } else {
9464 tableNameBuilder_.clear();
9465 }
9466 bitField0_ = (bitField0_ & ~0x00000001);
9467 return this;
9468 }
9469
9470
9471
9472 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder getTableNameBuilder() {
9473 bitField0_ |= 0x00000001;
9474 onChanged();
9475 return getTableNameFieldBuilder().getBuilder();
9476 }
9477
9478
9479
9480 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
9481 if (tableNameBuilder_ != null) {
9482 return tableNameBuilder_.getMessageOrBuilder();
9483 } else {
9484 return tableName_;
9485 }
9486 }
9487
9488
9489
9490 private com.google.protobuf.SingleFieldBuilder<
9491 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>
9492 getTableNameFieldBuilder() {
9493 if (tableNameBuilder_ == null) {
9494 tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilder<
9495 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>(
9496 tableName_,
9497 getParentForChildren(),
9498 isClean());
9499 tableName_ = null;
9500 }
9501 return tableNameBuilder_;
9502 }
9503
9504
9505 private com.google.protobuf.ByteString encodedRegionName_ = com.google.protobuf.ByteString.EMPTY;
9506
9507
9508
9509 public boolean hasEncodedRegionName() {
9510 return ((bitField0_ & 0x00000002) == 0x00000002);
9511 }
9512
9513
9514
9515 public com.google.protobuf.ByteString getEncodedRegionName() {
9516 return encodedRegionName_;
9517 }
9518
9519
9520
9521 public Builder setEncodedRegionName(com.google.protobuf.ByteString value) {
9522 if (value == null) {
9523 throw new NullPointerException();
9524 }
9525 bitField0_ |= 0x00000002;
9526 encodedRegionName_ = value;
9527 onChanged();
9528 return this;
9529 }
9530
9531
9532
9533 public Builder clearEncodedRegionName() {
9534 bitField0_ = (bitField0_ & ~0x00000002);
9535 encodedRegionName_ = getDefaultInstance().getEncodedRegionName();
9536 onChanged();
9537 return this;
9538 }
9539
9540
9541 private java.util.List<org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor> stores_ =
9542 java.util.Collections.emptyList();
9543 private void ensureStoresIsMutable() {
9544 if (!((bitField0_ & 0x00000004) == 0x00000004)) {
9545 stores_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor>(stores_);
9546 bitField0_ |= 0x00000004;
9547 }
9548 }
9549
9550 private com.google.protobuf.RepeatedFieldBuilder<
9551 org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor, org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.Builder, org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptorOrBuilder> storesBuilder_;
9552
9553
9554
9555
9556 public java.util.List<org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor> getStoresList() {
9557 if (storesBuilder_ == null) {
9558 return java.util.Collections.unmodifiableList(stores_);
9559 } else {
9560 return storesBuilder_.getMessageList();
9561 }
9562 }
9563
9564
9565
9566 public int getStoresCount() {
9567 if (storesBuilder_ == null) {
9568 return stores_.size();
9569 } else {
9570 return storesBuilder_.getCount();
9571 }
9572 }
9573
9574
9575
9576 public org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor getStores(int index) {
9577 if (storesBuilder_ == null) {
9578 return stores_.get(index);
9579 } else {
9580 return storesBuilder_.getMessage(index);
9581 }
9582 }
9583
9584
9585
9586 public Builder setStores(
9587 int index, org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor value) {
9588 if (storesBuilder_ == null) {
9589 if (value == null) {
9590 throw new NullPointerException();
9591 }
9592 ensureStoresIsMutable();
9593 stores_.set(index, value);
9594 onChanged();
9595 } else {
9596 storesBuilder_.setMessage(index, value);
9597 }
9598 return this;
9599 }
9600
9601
9602
9603 public Builder setStores(
9604 int index, org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.Builder builderForValue) {
9605 if (storesBuilder_ == null) {
9606 ensureStoresIsMutable();
9607 stores_.set(index, builderForValue.build());
9608 onChanged();
9609 } else {
9610 storesBuilder_.setMessage(index, builderForValue.build());
9611 }
9612 return this;
9613 }
9614
9615
9616
9617 public Builder addStores(org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor value) {
9618 if (storesBuilder_ == null) {
9619 if (value == null) {
9620 throw new NullPointerException();
9621 }
9622 ensureStoresIsMutable();
9623 stores_.add(value);
9624 onChanged();
9625 } else {
9626 storesBuilder_.addMessage(value);
9627 }
9628 return this;
9629 }
9630
9631
9632
9633 public Builder addStores(
9634 int index, org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor value) {
9635 if (storesBuilder_ == null) {
9636 if (value == null) {
9637 throw new NullPointerException();
9638 }
9639 ensureStoresIsMutable();
9640 stores_.add(index, value);
9641 onChanged();
9642 } else {
9643 storesBuilder_.addMessage(index, value);
9644 }
9645 return this;
9646 }
9647
9648
9649
9650 public Builder addStores(
9651 org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.Builder builderForValue) {
9652 if (storesBuilder_ == null) {
9653 ensureStoresIsMutable();
9654 stores_.add(builderForValue.build());
9655 onChanged();
9656 } else {
9657 storesBuilder_.addMessage(builderForValue.build());
9658 }
9659 return this;
9660 }
9661
9662
9663
9664 public Builder addStores(
9665 int index, org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.Builder builderForValue) {
9666 if (storesBuilder_ == null) {
9667 ensureStoresIsMutable();
9668 stores_.add(index, builderForValue.build());
9669 onChanged();
9670 } else {
9671 storesBuilder_.addMessage(index, builderForValue.build());
9672 }
9673 return this;
9674 }
9675
9676
9677
9678 public Builder addAllStores(
9679 java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor> values) {
9680 if (storesBuilder_ == null) {
9681 ensureStoresIsMutable();
9682 super.addAll(values, stores_);
9683 onChanged();
9684 } else {
9685 storesBuilder_.addAllMessages(values);
9686 }
9687 return this;
9688 }
9689
9690
9691
9692 public Builder clearStores() {
9693 if (storesBuilder_ == null) {
9694 stores_ = java.util.Collections.emptyList();
9695 bitField0_ = (bitField0_ & ~0x00000004);
9696 onChanged();
9697 } else {
9698 storesBuilder_.clear();
9699 }
9700 return this;
9701 }
9702
9703
9704
9705 public Builder removeStores(int index) {
9706 if (storesBuilder_ == null) {
9707 ensureStoresIsMutable();
9708 stores_.remove(index);
9709 onChanged();
9710 } else {
9711 storesBuilder_.remove(index);
9712 }
9713 return this;
9714 }
9715
9716
9717
9718 public org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.Builder getStoresBuilder(
9719 int index) {
9720 return getStoresFieldBuilder().getBuilder(index);
9721 }
9722
9723
9724
9725 public org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptorOrBuilder getStoresOrBuilder(
9726 int index) {
9727 if (storesBuilder_ == null) {
9728 return stores_.get(index); } else {
9729 return storesBuilder_.getMessageOrBuilder(index);
9730 }
9731 }
9732
9733
9734
9735 public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptorOrBuilder>
9736 getStoresOrBuilderList() {
9737 if (storesBuilder_ != null) {
9738 return storesBuilder_.getMessageOrBuilderList();
9739 } else {
9740 return java.util.Collections.unmodifiableList(stores_);
9741 }
9742 }
9743
9744
9745
9746 public org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.Builder addStoresBuilder() {
9747 return getStoresFieldBuilder().addBuilder(
9748 org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.getDefaultInstance());
9749 }
9750
9751
9752
9753 public org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.Builder addStoresBuilder(
9754 int index) {
9755 return getStoresFieldBuilder().addBuilder(
9756 index, org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.getDefaultInstance());
9757 }
9758
9759
9760
9761 public java.util.List<org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.Builder>
9762 getStoresBuilderList() {
9763 return getStoresFieldBuilder().getBuilderList();
9764 }
9765 private com.google.protobuf.RepeatedFieldBuilder<
9766 org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor, org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.Builder, org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptorOrBuilder>
9767 getStoresFieldBuilder() {
9768 if (storesBuilder_ == null) {
9769 storesBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
9770 org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor, org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.Builder, org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptorOrBuilder>(
9771 stores_,
9772 ((bitField0_ & 0x00000004) == 0x00000004),
9773 getParentForChildren(),
9774 isClean());
9775 stores_ = null;
9776 }
9777 return storesBuilder_;
9778 }
9779
9780
9781 private long bulkloadSeqNum_ ;
9782
9783
9784
9785 public boolean hasBulkloadSeqNum() {
9786 return ((bitField0_ & 0x00000008) == 0x00000008);
9787 }
9788
9789
9790
9791 public long getBulkloadSeqNum() {
9792 return bulkloadSeqNum_;
9793 }
9794
9795
9796
9797 public Builder setBulkloadSeqNum(long value) {
9798 bitField0_ |= 0x00000008;
9799 bulkloadSeqNum_ = value;
9800 onChanged();
9801 return this;
9802 }
9803
9804
9805
9806 public Builder clearBulkloadSeqNum() {
9807 bitField0_ = (bitField0_ & ~0x00000008);
9808 bulkloadSeqNum_ = 0L;
9809 onChanged();
9810 return this;
9811 }
9812
9813
9814 }
9815
    // Eagerly create the shared default instance at class-load time.
    static {
      defaultInstance = new BulkLoadDescriptor(true);
      defaultInstance.initFields();
    }
9820
9821
9822 }
9823
  /**
   * Read-only accessor interface shared by {@code RegionEventDescriptor}
   * and its builder (standard protobuf *OrBuilder contract).
   *
   * Required fields (enforced by the message's isInitialized()):
   * event_type, table_name, encoded_region_name. All others are optional.
   */
  public interface RegionEventDescriptorOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // .RegionEventDescriptor.EventType event_type = 1;

    /** Whether event_type is set. */
    boolean hasEventType();

    /** The event type (REGION_OPEN / REGION_CLOSE). */
    org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.EventType getEventType();

    // bytes table_name = 2;

    /** Whether table_name is set. */
    boolean hasTableName();

    /** Raw table name bytes. */
    com.google.protobuf.ByteString getTableName();

    // bytes encoded_region_name = 3;

    /** Whether encoded_region_name is set. */
    boolean hasEncodedRegionName();

    /** Encoded region name bytes. */
    com.google.protobuf.ByteString getEncodedRegionName();

    // uint64 log_sequence_number = 4;

    /** Whether log_sequence_number is set. */
    boolean hasLogSequenceNumber();

    /** The WAL log sequence number. */
    long getLogSequenceNumber();

    // repeated .StoreDescriptor stores = 5;

    /** All store descriptors for the event. */
    java.util.List<org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor> 
        getStoresList();

    /** Store descriptor at the given index. */
    org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor getStores(int index);

    /** Number of store descriptors. */
    int getStoresCount();

    /** OrBuilder views of the store descriptors. */
    java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptorOrBuilder> 
        getStoresOrBuilderList();

    /** OrBuilder view of the store descriptor at the given index. */
    org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptorOrBuilder getStoresOrBuilder(
        int index);

    // .ServerName server = 6;

    /** Whether server is set. */
    boolean hasServer();

    /** Server where the region resides. */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServer();

    /** OrBuilder view of server. */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerOrBuilder();

    // bytes region_name = 7;

    /** Whether region_name is set. */
    boolean hasRegionName();

    /** Full region name bytes. */
    com.google.protobuf.ByteString getRegionName();
  }
9936
9937
9938
9939
9940
9941
9942
9943
9944 public static final class RegionEventDescriptor extends
9945 com.google.protobuf.GeneratedMessage
9946 implements RegionEventDescriptorOrBuilder {
9947
    // Used by the generated Builder to construct a message instance.
    private RegionEventDescriptor(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Used only to build the shared defaultInstance (no field init here).
    private RegionEventDescriptor(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
9953
    // Shared immutable default instance (assigned in the static initializer).
    private static final RegionEventDescriptor defaultInstance;
    /** The singleton default (all-fields-default) instance. */
    public static RegionEventDescriptor getDefaultInstance() {
      return defaultInstance;
    }

    public RegionEventDescriptor getDefaultInstanceForType() {
      return defaultInstance;
    }
9962
    // Fields present on the wire that this schema version does not know.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    /**
     * Wire-format parsing constructor: reads tag/value pairs until EOF
     * (tag 0) or an unparseable unknown field, setting presence bits as
     * fields arrive. Unknown fields are preserved in unknownFields.
     *
     * @throws com.google.protobuf.InvalidProtocolBufferException on
     *         malformed input; the partial message is attached to it.
     */
    private RegionEventDescriptor(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          // NOTE: `default` precedes the field cases here — legal in Java;
          // switch dispatch is unaffected by case order.
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 8: {
              // event_type: unknown enum numbers are kept as varints in
              // unknownFields rather than dropped.
              int rawValue = input.readEnum();
              org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.EventType value = org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.EventType.valueOf(rawValue);
              if (value == null) {
                unknownFields.mergeVarintField(1, rawValue);
              } else {
                bitField0_ |= 0x00000001;
                eventType_ = value;
              }
              break;
            }
            case 18: {
              bitField0_ |= 0x00000002;
              tableName_ = input.readBytes();
              break;
            }
            case 26: {
              bitField0_ |= 0x00000004;
              encodedRegionName_ = input.readBytes();
              break;
            }
            case 32: {
              bitField0_ |= 0x00000008;
              logSequenceNumber_ = input.readUInt64();
              break;
            }
            case 42: {
              // stores: lazily switch to a mutable list on first element.
              if (!((mutable_bitField0_ & 0x00000010) == 0x00000010)) {
                stores_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor>();
                mutable_bitField0_ |= 0x00000010;
              }
              stores_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.PARSER, extensionRegistry));
              break;
            }
            case 50: {
              // server: if already set, merge the new value into the old.
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder subBuilder = null;
              if (((bitField0_ & 0x00000010) == 0x00000010)) {
                subBuilder = server_.toBuilder();
              }
              server_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(server_);
                server_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000010;
              break;
            }
            case 58: {
              bitField0_ |= 0x00000020;
              regionName_ = input.readBytes();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Freeze the repeated field and unknown set even on failure paths.
        if (((mutable_bitField0_ & 0x00000010) == 0x00000010)) {
          stores_ = java.util.Collections.unmodifiableList(stores_);
        }
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    /** Descriptor for the RegionEventDescriptor message type. */
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_RegionEventDescriptor_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_RegionEventDescriptor_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.class, org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.Builder.class);
    }
10070
    /** Stream parser that delegates to the wire-parsing constructor. */
    public static com.google.protobuf.Parser<RegionEventDescriptor> PARSER =
        new com.google.protobuf.AbstractParser<RegionEventDescriptor>() {
      public RegionEventDescriptor parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new RegionEventDescriptor(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<RegionEventDescriptor> getParserForType() {
      return PARSER;
    }
10085
10086
10087
10088
    /**
     * Generated enum {@code RegionEventDescriptor.EventType}: the kind of
     * region lifecycle event recorded in the WAL. Each constant carries its
     * descriptor index and wire number (identical here: 0 and 1).
     */
    public enum EventType
        implements com.google.protobuf.ProtocolMessageEnum {
      /** REGION_OPEN = 0; */
      REGION_OPEN(0, 0),
      /** REGION_CLOSE = 1; */
      REGION_CLOSE(1, 1),
      ;

      /** Wire value of REGION_OPEN. */
      public static final int REGION_OPEN_VALUE = 0;
      /** Wire value of REGION_CLOSE. */
      public static final int REGION_CLOSE_VALUE = 1;


      public final int getNumber() { return value; }

      /** Maps a wire number to its constant, or null if unrecognized. */
      public static EventType valueOf(int value) {
        switch (value) {
          case 0: return REGION_OPEN;
          case 1: return REGION_CLOSE;
          default: return null;
        }
      }

      public static com.google.protobuf.Internal.EnumLiteMap<EventType>
          internalGetValueMap() {
        return internalValueMap;
      }
      private static com.google.protobuf.Internal.EnumLiteMap<EventType>
          internalValueMap =
            new com.google.protobuf.Internal.EnumLiteMap<EventType>() {
              public EventType findValueByNumber(int number) {
                return EventType.valueOf(number);
              }
            };

      public final com.google.protobuf.Descriptors.EnumValueDescriptor
          getValueDescriptor() {
        return getDescriptor().getValues().get(index);
      }
      public final com.google.protobuf.Descriptors.EnumDescriptor
          getDescriptorForType() {
        return getDescriptor();
      }
      public static final com.google.protobuf.Descriptors.EnumDescriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.getDescriptor().getEnumTypes().get(0);
      }

      private static final EventType[] VALUES = values();

      /** Looks up the constant for a value descriptor of this enum type. */
      public static EventType valueOf(
          com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
        if (desc.getType() != getDescriptor()) {
          throw new java.lang.IllegalArgumentException(
            "EnumValueDescriptor is not for this type.");
        }
        return VALUES[desc.getIndex()];
      }

      private final int index;
      private final int value;

      private EventType(int index, int value) {
        this.index = index;
        this.value = value;
      }

      // @@protoc_insertion_point(enum_scope:RegionEventDescriptor.EventType)
    }
10167
    // Packed presence bits for the optional/required fields below.
    private int bitField0_;

    // required .RegionEventDescriptor.EventType event_type = 1;
    public static final int EVENT_TYPE_FIELD_NUMBER = 1;
    private org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.EventType eventType_;

    /** Whether event_type is set (bit 0x1). */
    public boolean hasEventType() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }

    /** The event type; REGION_OPEN when unset (see initFields). */
    public org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.EventType getEventType() {
      return eventType_;
    }

    // required bytes table_name = 2;
    public static final int TABLE_NAME_FIELD_NUMBER = 2;
    private com.google.protobuf.ByteString tableName_;

    /** Whether table_name is set (bit 0x2). */
    public boolean hasTableName() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }

    public com.google.protobuf.ByteString getTableName() {
      return tableName_;
    }

    // required bytes encoded_region_name = 3;
    public static final int ENCODED_REGION_NAME_FIELD_NUMBER = 3;
    private com.google.protobuf.ByteString encodedRegionName_;

    /** Whether encoded_region_name is set (bit 0x4). */
    public boolean hasEncodedRegionName() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }

    public com.google.protobuf.ByteString getEncodedRegionName() {
      return encodedRegionName_;
    }

    // optional uint64 log_sequence_number = 4;
    public static final int LOG_SEQUENCE_NUMBER_FIELD_NUMBER = 4;
    private long logSequenceNumber_;

    /** Whether log_sequence_number is set (bit 0x8). */
    public boolean hasLogSequenceNumber() {
      return ((bitField0_ & 0x00000008) == 0x00000008);
    }

    public long getLogSequenceNumber() {
      return logSequenceNumber_;
    }

    // repeated .StoreDescriptor stores = 5;
    public static final int STORES_FIELD_NUMBER = 5;
    private java.util.List<org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor> stores_;

    /** Immutable list of store descriptors (empty when none parsed). */
    public java.util.List<org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor> getStoresList() {
      return stores_;
    }

    public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptorOrBuilder> 
        getStoresOrBuilderList() {
      return stores_;
    }

    public int getStoresCount() {
      return stores_.size();
    }

    public org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor getStores(int index) {
      return stores_.get(index);
    }

    public org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptorOrBuilder getStoresOrBuilder(
        int index) {
      return stores_.get(index);
    }

    // optional .ServerName server = 6;
    public static final int SERVER_FIELD_NUMBER = 6;
    private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName server_;

    /** Whether server is set (bit 0x10). */
    public boolean hasServer() {
      return ((bitField0_ & 0x00000010) == 0x00000010);
    }

    /** Server where the region resides; default instance when unset. */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServer() {
      return server_;
    }

    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerOrBuilder() {
      return server_;
    }

    // optional bytes region_name = 7;
    public static final int REGION_NAME_FIELD_NUMBER = 7;
    private com.google.protobuf.ByteString regionName_;

    /** Whether region_name is set (bit 0x20). */
    public boolean hasRegionName() {
      return ((bitField0_ & 0x00000020) == 0x00000020);
    }

    /** Full region name bytes; empty when unset. */
    public com.google.protobuf.ByteString getRegionName() {
      return regionName_;
    }
10326
    // Sets every field to its proto-declared default value.
    private void initFields() {
      eventType_ = org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.EventType.REGION_OPEN;
      tableName_ = com.google.protobuf.ByteString.EMPTY;
      encodedRegionName_ = com.google.protobuf.ByteString.EMPTY;
      logSequenceNumber_ = 0L;
      stores_ = java.util.Collections.emptyList();
      server_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance();
      regionName_ = com.google.protobuf.ByteString.EMPTY;
    }
    // Memoized result: -1 unknown, 0 false, 1 true.
    private byte memoizedIsInitialized = -1;
    /**
     * True when all required fields (event_type, table_name,
     * encoded_region_name) are set and each nested store/server message
     * is itself initialized. Result is cached after first computation.
     */
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      if (!hasEventType()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasTableName()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasEncodedRegionName()) {
        memoizedIsInitialized = 0;
        return false;
      }
      for (int i = 0; i < getStoresCount(); i++) {
        if (!getStores(i).isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      if (hasServer()) {
        if (!getServer().isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }
10368
    /**
     * Serializes all set fields to {@code output} in ascending tag order
     * (1..7). {@code getSerializedSize()} is invoked first for its side
     * effect of populating {@code memoizedSerializedSize}, which the
     * CodedOutputStream machinery relies on for nested messages.
     *
     * @throws java.io.IOException if the underlying stream fails
     */
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeEnum(1, eventType_.getNumber());
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeBytes(2, tableName_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeBytes(3, encodedRegionName_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        output.writeUInt64(4, logSequenceNumber_);
      }
      for (int i = 0; i < stores_.size(); i++) {
        output.writeMessage(5, stores_.get(i));
      }
      if (((bitField0_ & 0x00000010) == 0x00000010)) {
        output.writeMessage(6, server_);
      }
      if (((bitField0_ & 0x00000020) == 0x00000020)) {
        output.writeBytes(7, regionName_);
      }
      getUnknownFields().writeTo(output);
    }

    // Memoized wire size; -1 until first computed.
    private int memoizedSerializedSize = -1;
    /**
     * Computes (and caches) the serialized byte size of this message,
     * mirroring the field/branch structure of {@code writeTo} exactly —
     * the two methods must stay in lock-step.
     */
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeEnumSize(1, eventType_.getNumber());
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(2, tableName_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(3, encodedRegionName_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(4, logSequenceNumber_);
      }
      for (int i = 0; i < stores_.size(); i++) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(5, stores_.get(i));
      }
      if (((bitField0_ & 0x00000010) == 0x00000010)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(6, server_);
      }
      if (((bitField0_ & 0x00000020) == 0x00000020)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(7, regionName_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
10434
    private static final long serialVersionUID = 0L;
    /**
     * Delegates Java serialization to the superclass replacement object
     * (generated messages serialize via their proto wire form).
     */
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    /**
     * Field-by-field equality: two messages are equal when they agree on
     * the presence of each optional field, on every set field's value, on
     * the full stores list, and on their unknown fields.
     */
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor other = (org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor) obj;

      boolean result = true;
      result = result && (hasEventType() == other.hasEventType());
      if (hasEventType()) {
        result = result &&
            (getEventType() == other.getEventType());
      }
      result = result && (hasTableName() == other.hasTableName());
      if (hasTableName()) {
        result = result && getTableName()
            .equals(other.getTableName());
      }
      result = result && (hasEncodedRegionName() == other.hasEncodedRegionName());
      if (hasEncodedRegionName()) {
        result = result && getEncodedRegionName()
            .equals(other.getEncodedRegionName());
      }
      result = result && (hasLogSequenceNumber() == other.hasLogSequenceNumber());
      if (hasLogSequenceNumber()) {
        result = result && (getLogSequenceNumber()
            == other.getLogSequenceNumber());
      }
      result = result && getStoresList()
          .equals(other.getStoresList());
      result = result && (hasServer() == other.hasServer());
      if (hasServer()) {
        result = result && getServer()
            .equals(other.getServer());
      }
      result = result && (hasRegionName() == other.hasRegionName());
      if (hasRegionName()) {
        result = result && getRegionName()
            .equals(other.getRegionName());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
10489
    // Memoized hash; 0 means "not computed yet".
    private int memoizedHashCode = 0;
    /**
     * Hash code consistent with {@link #equals}: folds in each set field
     * keyed by its field number, plus the descriptor type and unknown
     * fields. Result is cached in {@code memoizedHashCode}.
     */
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasEventType()) {
        hash = (37 * hash) + EVENT_TYPE_FIELD_NUMBER;
        hash = (53 * hash) + hashEnum(getEventType());
      }
      if (hasTableName()) {
        hash = (37 * hash) + TABLE_NAME_FIELD_NUMBER;
        hash = (53 * hash) + getTableName().hashCode();
      }
      if (hasEncodedRegionName()) {
        hash = (37 * hash) + ENCODED_REGION_NAME_FIELD_NUMBER;
        hash = (53 * hash) + getEncodedRegionName().hashCode();
      }
      if (hasLogSequenceNumber()) {
        hash = (37 * hash) + LOG_SEQUENCE_NUMBER_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getLogSequenceNumber());
      }
      if (getStoresCount() > 0) {
        hash = (37 * hash) + STORES_FIELD_NUMBER;
        hash = (53 * hash) + getStoresList().hashCode();
      }
      if (hasServer()) {
        hash = (37 * hash) + SERVER_FIELD_NUMBER;
        hash = (53 * hash) + getServer().hashCode();
      }
      if (hasRegionName()) {
        hash = (37 * hash) + REGION_NAME_FIELD_NUMBER;
        hash = (53 * hash) + getRegionName().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
10530
    // ---- Static parse entry points: all delegate to the message PARSER. ----
    // The ByteString/byte[] overloads throw InvalidProtocolBufferException on
    // malformed input; the stream overloads throw IOException. The
    // *parseDelimitedFrom* variants read a varint length prefix first.

    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    // ---- Builder factories. ----

    /** @return a fresh builder with all fields at their defaults. */
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    /** @return a new builder pre-populated from {@code prototype}. */
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    /** Creates a builder attached to {@code parent} for nested-builder change propagation. */
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
10597
10598
10599
10600
10601
10602
10603
10604
10605 public static final class Builder extends
10606 com.google.protobuf.GeneratedMessage.Builder<Builder>
10607 implements org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptorOrBuilder {
      /** @return the protobuf descriptor for RegionEventDescriptor. */
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_RegionEventDescriptor_descriptor;
      }

      /** Wires reflective field access to the message and builder classes. */
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_RegionEventDescriptor_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.class, org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.Builder.class);
      }

      // Construct using WALProtos.RegionEventDescriptor.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      /**
       * Eagerly creates the nested field builders (stores, server) when the
       * runtime mandates it — needed so parent/child change notification
       * works for nested builders.
       */
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getStoresFieldBuilder();
          getServerFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }
10639
      /**
       * Resets every field to its default and clears all presence bits.
       * NOTE: the builder's bit layout differs from the message's — here
       * 0x10 marks a mutable stores list, 0x20 the server field, and 0x40
       * region_name.
       */
      public Builder clear() {
        super.clear();
        eventType_ = org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.EventType.REGION_OPEN;
        bitField0_ = (bitField0_ & ~0x00000001);
        tableName_ = com.google.protobuf.ByteString.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000002);
        encodedRegionName_ = com.google.protobuf.ByteString.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000004);
        logSequenceNumber_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000008);
        if (storesBuilder_ == null) {
          stores_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000010);
        } else {
          storesBuilder_.clear();
        }
        if (serverBuilder_ == null) {
          server_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance();
        } else {
          serverBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000020);
        regionName_ = com.google.protobuf.ByteString.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000040);
        return this;
      }

      /** Deep copy via a partial build of the current state. */
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_RegionEventDescriptor_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.getDefaultInstance();
      }

      /**
       * Builds the message, throwing if any required field is missing
       * (unlike {@code buildPartial}, which never validates).
       *
       * @throws com.google.protobuf.UninitializedMessageException via
       *         {@code newUninitializedMessageException} when invalid
       */
      public org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor build() {
        org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
10687
      /**
       * Assembles a message from the builder state without validating
       * required fields. Builder presence bits are remapped to message bits:
       * the repeated {@code stores} field occupies builder bit 0x10 (list
       * mutability) but has no presence bit in the message, so the builder's
       * server bit 0x20 becomes message bit 0x10 and region_name's 0x40
       * becomes 0x20.
       */
      public org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor result = new org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.eventType_ = eventType_;
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.tableName_ = tableName_;
        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
          to_bitField0_ |= 0x00000004;
        }
        result.encodedRegionName_ = encodedRegionName_;
        if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
          to_bitField0_ |= 0x00000008;
        }
        result.logSequenceNumber_ = logSequenceNumber_;
        if (storesBuilder_ == null) {
          // Freeze the inline list so the immutable message can share it.
          if (((bitField0_ & 0x00000010) == 0x00000010)) {
            stores_ = java.util.Collections.unmodifiableList(stores_);
            bitField0_ = (bitField0_ & ~0x00000010);
          }
          result.stores_ = stores_;
        } else {
          result.stores_ = storesBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
          to_bitField0_ |= 0x00000010;
        }
        if (serverBuilder_ == null) {
          result.server_ = server_;
        } else {
          result.server_ = serverBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000040) == 0x00000040)) {
          to_bitField0_ |= 0x00000020;
        }
        result.regionName_ = regionName_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
10733
      /**
       * Generic merge: dispatches to the typed overload when {@code other}
       * is a RegionEventDescriptor, otherwise falls back to reflective
       * field-by-field merging in the superclass.
       */
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      /**
       * Merges all set fields of {@code other} into this builder: scalar
       * fields are overwritten, the stores list is appended, and the server
       * message is recursively merged via {@code mergeServer}.
       */
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.getDefaultInstance()) return this;
        if (other.hasEventType()) {
          setEventType(other.getEventType());
        }
        if (other.hasTableName()) {
          setTableName(other.getTableName());
        }
        if (other.hasEncodedRegionName()) {
          setEncodedRegionName(other.getEncodedRegionName());
        }
        if (other.hasLogSequenceNumber()) {
          setLogSequenceNumber(other.getLogSequenceNumber());
        }
        if (storesBuilder_ == null) {
          if (!other.stores_.isEmpty()) {
            if (stores_.isEmpty()) {
              // Share other's (immutable) list; clear the mutability bit.
              stores_ = other.stores_;
              bitField0_ = (bitField0_ & ~0x00000010);
            } else {
              ensureStoresIsMutable();
              stores_.addAll(other.stores_);
            }
            onChanged();
          }
        } else {
          if (!other.stores_.isEmpty()) {
            if (storesBuilder_.isEmpty()) {
              // Drop the empty nested builder and adopt other's list
              // directly; recreate the builder only if the runtime forces
              // field builders.
              storesBuilder_.dispose();
              storesBuilder_ = null;
              stores_ = other.stores_;
              bitField0_ = (bitField0_ & ~0x00000010);
              storesBuilder_ =
                com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
                   getStoresFieldBuilder() : null;
            } else {
              storesBuilder_.addAllMessages(other.stores_);
            }
          }
        }
        if (other.hasServer()) {
          mergeServer(other.getServer());
        }
        if (other.hasRegionName()) {
          setRegionName(other.getRegionName());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
10792
      /**
       * Mirrors the message-side {@code isInitialized()} (required fields
       * set; nested stores/server initialized) but without memoization,
       * since builder state is mutable.
       */
      public final boolean isInitialized() {
        if (!hasEventType()) {
          
          return false;
        }
        if (!hasTableName()) {
          
          return false;
        }
        if (!hasEncodedRegionName()) {
          
          return false;
        }
        for (int i = 0; i < getStoresCount(); i++) {
          if (!getStores(i).isInitialized()) {
            
            return false;
          }
        }
        if (hasServer()) {
          if (!getServer().isInitialized()) {
            
            return false;
          }
        }
        return true;
      }

      /**
       * Parses from a stream and merges into this builder. On a parse
       * failure the partially-parsed message (if any) is still merged in
       * the {@code finally} block before the exception propagates, so no
       * successfully-read fields are lost.
       */
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      // Builder presence/mutability bits; layout differs from the message
      // (see buildPartial for the remapping).
      private int bitField0_;

      // required .RegionEventDescriptor.EventType event_type = 1;
      private org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.EventType eventType_ = org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.EventType.REGION_OPEN;
      /**
       * <code>required .RegionEventDescriptor.EventType event_type = 1;</code>
       */
      public boolean hasEventType() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>required .RegionEventDescriptor.EventType event_type = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.EventType getEventType() {
        return eventType_;
      }
      /**
       * <code>required .RegionEventDescriptor.EventType event_type = 1;</code>
       *
       * @throws NullPointerException if {@code value} is null
       */
      public Builder setEventType(org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.EventType value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000001;
        eventType_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>required .RegionEventDescriptor.EventType event_type = 1;</code>
       * Resets to the default (REGION_OPEN) and clears the presence bit.
       */
      public Builder clearEventType() {
        bitField0_ = (bitField0_ & ~0x00000001);
        eventType_ = org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.EventType.REGION_OPEN;
        onChanged();
        return this;
      }
10875
10876
      // required bytes table_name = 2;
      private com.google.protobuf.ByteString tableName_ = com.google.protobuf.ByteString.EMPTY;
      /**
       * <code>required bytes table_name = 2;</code>
       */
      public boolean hasTableName() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>required bytes table_name = 2;</code>
       */
      public com.google.protobuf.ByteString getTableName() {
        return tableName_;
      }
      /**
       * <code>required bytes table_name = 2;</code>
       *
       * @throws NullPointerException if {@code value} is null
       */
      public Builder setTableName(com.google.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000002;
        tableName_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>required bytes table_name = 2;</code>
       * Resets to the message default and clears the presence bit.
       */
      public Builder clearTableName() {
        bitField0_ = (bitField0_ & ~0x00000002);
        tableName_ = getDefaultInstance().getTableName();
        onChanged();
        return this;
      }

      // required bytes encoded_region_name = 3;
      private com.google.protobuf.ByteString encodedRegionName_ = com.google.protobuf.ByteString.EMPTY;
      /**
       * <code>required bytes encoded_region_name = 3;</code>
       */
      public boolean hasEncodedRegionName() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      /**
       * <code>required bytes encoded_region_name = 3;</code>
       */
      public com.google.protobuf.ByteString getEncodedRegionName() {
        return encodedRegionName_;
      }
      /**
       * <code>required bytes encoded_region_name = 3;</code>
       *
       * @throws NullPointerException if {@code value} is null
       */
      public Builder setEncodedRegionName(com.google.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000004;
        encodedRegionName_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>required bytes encoded_region_name = 3;</code>
       * Resets to the message default and clears the presence bit.
       */
      public Builder clearEncodedRegionName() {
        bitField0_ = (bitField0_ & ~0x00000004);
        encodedRegionName_ = getDefaultInstance().getEncodedRegionName();
        onChanged();
        return this;
      }
10947
10948
      // optional uint64 log_sequence_number = 4;
      private long logSequenceNumber_ ;
      /**
       * <code>optional uint64 log_sequence_number = 4;</code>
       */
      public boolean hasLogSequenceNumber() {
        return ((bitField0_ & 0x00000008) == 0x00000008);
      }
      /**
       * <code>optional uint64 log_sequence_number = 4;</code>
       */
      public long getLogSequenceNumber() {
        return logSequenceNumber_;
      }
      /**
       * <code>optional uint64 log_sequence_number = 4;</code>
       */
      public Builder setLogSequenceNumber(long value) {
        bitField0_ |= 0x00000008;
        logSequenceNumber_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional uint64 log_sequence_number = 4;</code>
       * Resets to 0 and clears the presence bit.
       */
      public Builder clearLogSequenceNumber() {
        bitField0_ = (bitField0_ & ~0x00000008);
        logSequenceNumber_ = 0L;
        onChanged();
        return this;
      }
10980
10981
      // repeated .StoreDescriptor stores = 5;
      // Either the inline list (stores_) or the nested field builder
      // (storesBuilder_) is authoritative — never both. Builder bit 0x10
      // means stores_ is a private mutable ArrayList.
      private java.util.List<org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor> stores_ =
        java.util.Collections.emptyList();
      /** Copy-on-write: replace a shared/immutable list with a private ArrayList. */
      private void ensureStoresIsMutable() {
        if (!((bitField0_ & 0x00000010) == 0x00000010)) {
          stores_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor>(stores_);
          bitField0_ |= 0x00000010;
         }
      }

      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor, org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.Builder, org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptorOrBuilder> storesBuilder_;

      /**
       * <code>repeated .StoreDescriptor stores = 5;</code>
       * @return an unmodifiable view of the current stores list.
       */
      public java.util.List<org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor> getStoresList() {
        if (storesBuilder_ == null) {
          return java.util.Collections.unmodifiableList(stores_);
        } else {
          return storesBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .StoreDescriptor stores = 5;</code>
       */
      public int getStoresCount() {
        if (storesBuilder_ == null) {
          return stores_.size();
        } else {
          return storesBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .StoreDescriptor stores = 5;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor getStores(int index) {
        if (storesBuilder_ == null) {
          return stores_.get(index);
        } else {
          return storesBuilder_.getMessage(index);
        }
      }
11024
11025
11026
      /**
       * <code>repeated .StoreDescriptor stores = 5;</code>
       * Replaces the element at {@code index}.
       *
       * @throws NullPointerException if {@code value} is null
       */
      public Builder setStores(
          int index, org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor value) {
        if (storesBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureStoresIsMutable();
          stores_.set(index, value);
          onChanged();
        } else {
          storesBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .StoreDescriptor stores = 5;</code>
       * Replaces the element at {@code index} with the built message.
       */
      public Builder setStores(
          int index, org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.Builder builderForValue) {
        if (storesBuilder_ == null) {
          ensureStoresIsMutable();
          stores_.set(index, builderForValue.build());
          onChanged();
        } else {
          storesBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .StoreDescriptor stores = 5;</code>
       * Appends {@code value}.
       *
       * @throws NullPointerException if {@code value} is null
       */
      public Builder addStores(org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor value) {
        if (storesBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureStoresIsMutable();
          stores_.add(value);
          onChanged();
        } else {
          storesBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .StoreDescriptor stores = 5;</code>
       * Inserts {@code value} at {@code index}.
       *
       * @throws NullPointerException if {@code value} is null
       */
      public Builder addStores(
          int index, org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor value) {
        if (storesBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureStoresIsMutable();
          stores_.add(index, value);
          onChanged();
        } else {
          storesBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .StoreDescriptor stores = 5;</code>
       * Appends the built message.
       */
      public Builder addStores(
          org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.Builder builderForValue) {
        if (storesBuilder_ == null) {
          ensureStoresIsMutable();
          stores_.add(builderForValue.build());
          onChanged();
        } else {
          storesBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .StoreDescriptor stores = 5;</code>
       * Inserts the built message at {@code index}.
       */
      public Builder addStores(
          int index, org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.Builder builderForValue) {
        if (storesBuilder_ == null) {
          ensureStoresIsMutable();
          stores_.add(index, builderForValue.build());
          onChanged();
        } else {
          storesBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
11116
11117
11118
      /**
       * <code>repeated .StoreDescriptor stores = 5;</code>
       * Appends every element of {@code values} (null-checked by the
       * superclass addAll helper).
       */
      public Builder addAllStores(
          java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor> values) {
        if (storesBuilder_ == null) {
          ensureStoresIsMutable();
          super.addAll(values, stores_);
          onChanged();
        } else {
          storesBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .StoreDescriptor stores = 5;</code>
       * Removes all elements and clears the list-mutability bit.
       */
      public Builder clearStores() {
        if (storesBuilder_ == null) {
          stores_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000010);
          onChanged();
        } else {
          storesBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .StoreDescriptor stores = 5;</code>
       * Removes the element at {@code index}.
       */
      public Builder removeStores(int index) {
        if (storesBuilder_ == null) {
          ensureStoresIsMutable();
          stores_.remove(index);
          onChanged();
        } else {
          storesBuilder_.remove(index);
        }
        return this;
      }
11156
11157
11158
      /**
       * <code>repeated .StoreDescriptor stores = 5;</code>
       * @return a mutable builder for the element at {@code index}; forces
       *         creation of the nested field builder.
       */
      public org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.Builder getStoresBuilder(
          int index) {
        return getStoresFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .StoreDescriptor stores = 5;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptorOrBuilder getStoresOrBuilder(
          int index) {
        if (storesBuilder_ == null) {
          return stores_.get(index);  } else {
          return storesBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .StoreDescriptor stores = 5;</code>
       * @return a read-only OrBuilder view of the whole list.
       */
      public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptorOrBuilder> 
           getStoresOrBuilderList() {
        if (storesBuilder_ != null) {
          return storesBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(stores_);
        }
      }
      /**
       * <code>repeated .StoreDescriptor stores = 5;</code>
       * Appends a new default-valued element and returns its builder.
       */
      public org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.Builder addStoresBuilder() {
        return getStoresFieldBuilder().addBuilder(
            org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.getDefaultInstance());
      }
      /**
       * <code>repeated .StoreDescriptor stores = 5;</code>
       * Inserts a new default-valued element at {@code index} and returns
       * its builder.
       */
      public org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.Builder addStoresBuilder(
          int index) {
        return getStoresFieldBuilder().addBuilder(
            index, org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.getDefaultInstance());
      }
      /**
       * <code>repeated .StoreDescriptor stores = 5;</code>
       */
      public java.util.List<org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.Builder> 
           getStoresBuilderList() {
        return getStoresFieldBuilder().getBuilderList();
      }
      /**
       * Lazily creates the RepeatedFieldBuilder; from then on it owns the
       * list and {@code stores_} is nulled out (the builder path is taken
       * everywhere afterwards).
       */
      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor, org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.Builder, org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptorOrBuilder> 
          getStoresFieldBuilder() {
        if (storesBuilder_ == null) {
          storesBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor, org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.Builder, org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptorOrBuilder>(
                  stores_,
                  ((bitField0_ & 0x00000010) == 0x00000010),
                  getParentForChildren(),
                  isClean());
          stores_ = null;
        }
        return storesBuilder_;
      }
11220
11221
      // optional .ServerName server = 6;
      // Inline field vs. nested SingleFieldBuilder — exactly one is live.
      // Builder presence bit is 0x20 (maps to message bit 0x10).
      private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName server_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> serverBuilder_;
      /**
       * <code>optional .ServerName server = 6;</code>
       */
      public boolean hasServer() {
        return ((bitField0_ & 0x00000020) == 0x00000020);
      }
      /**
       * <code>optional .ServerName server = 6;</code>
       * @return the current value, from whichever of the inline field or
       *         the nested builder is live.
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServer() {
        if (serverBuilder_ == null) {
          return server_;
        } else {
          return serverBuilder_.getMessage();
        }
      }
11249
11250
11251
11252
11253
11254
11255
      /**
       * <code>optional .ServerName server = 6;</code>
       * Replaces the server value outright.
       *
       * @throws NullPointerException if {@code value} is null
       */
      public Builder setServer(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) {
        if (serverBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          server_ = value;
          onChanged();
        } else {
          serverBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000020;
        return this;
      }
      /**
       * <code>optional .ServerName server = 6;</code>
       * Replaces the server value with the built message.
       */
      public Builder setServer(
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) {
        if (serverBuilder_ == null) {
          server_ = builderForValue.build();
          onChanged();
        } else {
          serverBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000020;
        return this;
      }
      /**
       * <code>optional .ServerName server = 6;</code>
       * Protobuf merge semantics: if a non-default server is already set,
       * field-merge {@code value} into it; otherwise adopt {@code value}
       * wholesale.
       */
      public Builder mergeServer(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) {
        if (serverBuilder_ == null) {
          if (((bitField0_ & 0x00000020) == 0x00000020) &&
              server_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance()) {
            server_ =
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder(server_).mergeFrom(value).buildPartial();
          } else {
            server_ = value;
          }
          onChanged();
        } else {
          serverBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000020;
        return this;
      }
11310
11311
11312
11313
11314
11315
11316
11317 public Builder clearServer() {
11318 if (serverBuilder_ == null) {
11319 server_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance();
11320 onChanged();
11321 } else {
11322 serverBuilder_.clear();
11323 }
11324 bitField0_ = (bitField0_ & ~0x00000020);
11325 return this;
11326 }
11327
11328
11329
11330
11331
11332
11333
      /**
       * <code>optional .ServerName server = 6;</code>
       *
       * Returns a mutable builder for the server field, lazily creating the
       * backing {@code SingleFieldBuilder}. Also sets the has_server bit, since
       * handing out the builder implies the caller intends to populate it.
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder getServerBuilder() {
        bitField0_ |= 0x00000020;
        onChanged();
        return getServerFieldBuilder().getBuilder();
      }
11339
11340
11341
11342
11343
11344
11345
      /**
       * <code>optional .ServerName server = 6;</code>
       *
       * Read-only view of the server field: delegates to the nested builder
       * when one exists, otherwise returns the stored message directly.
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerOrBuilder() {
        if (serverBuilder_ != null) {
          return serverBuilder_.getMessageOrBuilder();
        } else {
          return server_;
        }
      }
11353
11354
11355
11356
11357
11358
11359
      /**
       * <code>optional .ServerName server = 6;</code>
       *
       * Lazily creates the {@code SingleFieldBuilder} that manages the server
       * sub-message. Once created, the builder owns the value, so the plain
       * {@code server_} reference is nulled out to avoid a stale alias.
       */
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder>
          getServerFieldBuilder() {
        if (serverBuilder_ == null) {
          serverBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder>(
                  server_,
                  getParentForChildren(),
                  isClean());
          // Ownership transferred to the field builder.
          server_ = null;
        }
        return serverBuilder_;
      }
11373
11374
      // Backing storage for field 7; defaults to the empty byte string.
      private com.google.protobuf.ByteString regionName_ = com.google.protobuf.ByteString.EMPTY;

      /**
       * <code>optional bytes region_name = 7;</code>
       *
       * @return true when region_name has been explicitly set (bit 0x40).
       */
      public boolean hasRegionName() {
        return ((bitField0_ & 0x00000040) == 0x00000040);
      }

      /**
       * <code>optional bytes region_name = 7;</code>
       *
       * @return the current value; {@code ByteString.EMPTY} when unset.
       */
      public com.google.protobuf.ByteString getRegionName() {
        return regionName_;
      }

      /**
       * <code>optional bytes region_name = 7;</code>
       *
       * Sets region_name; null is rejected per the protobuf contract.
       */
      public Builder setRegionName(com.google.protobuf.ByteString value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000040;
        regionName_ = value;
        onChanged();
        return this;
      }

      /**
       * <code>optional bytes region_name = 7;</code>
       *
       * Unsets region_name and restores the default value taken from the
       * message's default instance.
       */
      public Builder clearRegionName() {
        bitField0_ = (bitField0_ & ~0x00000040);
        regionName_ = getDefaultInstance().getRegionName();
        onChanged();
        return this;
      }
11425
11426
11427 }
11428
    // Eagerly build the shared default instance via the no-init constructor,
    // then normalize its fields to their declared defaults.
    static {
      defaultInstance = new RegionEventDescriptor(true);
      defaultInstance.initFields();
    }
11433
11434
11435 }
11436
  /**
   * Read-only accessor interface for {@code WALTrailer}. The message declares
   * no fields (see the serialized descriptor below), so the interface adds
   * nothing beyond {@link com.google.protobuf.MessageOrBuilder}.
   */
  public interface WALTrailerOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
  }
11440
11441
11442
11443
11444
11445
11446
11447
11448
11449
11450
  /**
   * Protobuf type {@code WALTrailer}.
   *
   * The message currently declares no fields (its accessor table lists an
   * empty field-name array), so an instance carries only unknown fields —
   * fields added by newer schema versions are preserved on a parse/serialize
   * round trip.
   */
  public static final class WALTrailer extends
      com.google.protobuf.GeneratedMessage
      implements WALTrailerOrBuilder {
    // Use WALTrailer.newBuilder() to construct.
    private WALTrailer(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // No-init constructor used only for the shared default instance.
    private WALTrailer(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final WALTrailer defaultInstance;
    public static WALTrailer getDefaultInstance() {
      return defaultInstance;
    }

    public WALTrailer getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    /**
     * Parsing constructor: since the message has no declared fields, every
     * non-zero tag is funneled into the unknown-field set. Tag 0 means
     * end-of-message. The unknown fields are frozen in the finally block even
     * when parsing fails, so the unfinished message attached to the thrown
     * exception is still consistent.
     */
    private WALTrailer(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_WALTrailer_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_WALTrailer_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer.class, org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer.Builder.class);
    }

    // Shared parser instance; delegates to the parsing constructor above.
    public static com.google.protobuf.Parser<WALTrailer> PARSER =
        new com.google.protobuf.AbstractParser<WALTrailer>() {
      public WALTrailer parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new WALTrailer(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<WALTrailer> getParserForType() {
      return PARSER;
    }

    // Nothing to reset: no declared fields.
    private void initFields() {
    }
    // Memoized tri-state: -1 unknown, 0 false, 1 true. Always initialized
    // here because there are no required fields to verify.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      // Populate the memoized size first (protoc convention), then emit only
      // the unknown fields — the message has no declared fields of its own.
      getSerializedSize();
      getUnknownFields().writeTo(output);
    }

    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    // With no declared fields, equality reduces to unknown-field equality.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer other = (org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer) obj;

      boolean result = true;
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    // Standard protoc-generated parse entry points, all delegating to PARSER.
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Builder for {@code WALTrailer}. Field-free: build/merge/clear only
     * manage unknown fields inherited from GeneratedMessage.Builder.
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailerOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_WALTrailer_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_WALTrailer_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer.class, org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        // No sub-message field builders to pre-create for this message.
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_WALTrailer_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer.getDefaultInstance();
      }

      public org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer build() {
        org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      public org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer result = new org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer(this);
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer.getDefaultInstance()) return this;
        // Only unknown fields can carry data for this message type.
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          // Keep whatever was parsed before the failure, then rethrow.
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }

      // @@protoc_insertion_point(builder_scope:WALTrailer)
    }

    static {
      defaultInstance = new WALTrailer(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:WALTrailer)
  }
11790
  // Per-message descriptors and reflective field-accessor tables for every
  // message declared in WAL.proto. All of these are assigned by the
  // InternalDescriptorAssigner in the static initializer at the bottom of
  // this file and must not be touched before class initialization completes.
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_WALHeader_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_WALHeader_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_WALKey_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_WALKey_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_FamilyScope_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_FamilyScope_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_CompactionDescriptor_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_CompactionDescriptor_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_FlushDescriptor_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_FlushDescriptor_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_FlushDescriptor_StoreFlushDescriptor_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_FlushDescriptor_StoreFlushDescriptor_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_StoreDescriptor_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_StoreDescriptor_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_BulkLoadDescriptor_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_BulkLoadDescriptor_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_RegionEventDescriptor_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_RegionEventDescriptor_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_WALTrailer_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_WALTrailer_fieldAccessorTable;
11841
  /**
   * @return the {@code FileDescriptor} for WAL.proto, built once by the
   *         static initializer below.
   */
  public static com.google.protobuf.Descriptors.FileDescriptor
      getDescriptor() {
    return descriptor;
  }
  private static com.google.protobuf.Descriptors.FileDescriptor
      descriptor;
  static {
    // Serialized FileDescriptorProto for WAL.proto (protoc output — do not
    // edit by hand; the escape bytes encode field numbers, types and labels).
    java.lang.String[] descriptorData = {
      "\n\tWAL.proto\032\013HBase.proto\032\014Client.proto\"\217" +
      "\001\n\tWALHeader\022\027\n\017has_compression\030\001 \001(\010\022\026\n" +
      "\016encryption_key\030\002 \001(\014\022\033\n\023has_tag_compres" +
      "sion\030\003 \001(\010\022\027\n\017writer_cls_name\030\004 \001(\t\022\033\n\023c" +
      "ell_codec_cls_name\030\005 \001(\t\"\240\002\n\006WALKey\022\033\n\023e" +
      "ncoded_region_name\030\001 \002(\014\022\022\n\ntable_name\030\002" +
      " \002(\014\022\033\n\023log_sequence_number\030\003 \002(\004\022\022\n\nwri" +
      "te_time\030\004 \002(\004\022\035\n\ncluster_id\030\005 \001(\0132\005.UUID" +
      "B\002\030\001\022\034\n\006scopes\030\006 \003(\0132\014.FamilyScope\022\032\n\022fo" +
      "llowing_kv_count\030\007 \001(\r\022\032\n\013cluster_ids\030\010 ",
      "\003(\0132\005.UUID\022\022\n\nnonceGroup\030\t \001(\004\022\r\n\005nonce\030" +
      "\n \001(\004\022\034\n\024orig_sequence_number\030\013 \001(\004\"=\n\013F" +
      "amilyScope\022\016\n\006family\030\001 \002(\014\022\036\n\nscope_type" +
      "\030\002 \002(\0162\n.ScopeType\"\276\001\n\024CompactionDescrip" +
      "tor\022\022\n\ntable_name\030\001 \002(\014\022\033\n\023encoded_regio" +
      "n_name\030\002 \002(\014\022\023\n\013family_name\030\003 \002(\014\022\030\n\020com" +
      "paction_input\030\004 \003(\t\022\031\n\021compaction_output" +
      "\030\005 \003(\t\022\026\n\016store_home_dir\030\006 \002(\t\022\023\n\013region" +
      "_name\030\007 \001(\014\"\222\003\n\017FlushDescriptor\022,\n\006actio" +
      "n\030\001 \002(\0162\034.FlushDescriptor.FlushAction\022\022\n",
      "\ntable_name\030\002 \002(\014\022\033\n\023encoded_region_name" +
      "\030\003 \002(\014\022\035\n\025flush_sequence_number\030\004 \001(\004\022<\n" +
      "\rstore_flushes\030\005 \003(\0132%.FlushDescriptor.S" +
      "toreFlushDescriptor\022\023\n\013region_name\030\006 \001(\014" +
      "\032Y\n\024StoreFlushDescriptor\022\023\n\013family_name\030" +
      "\001 \002(\014\022\026\n\016store_home_dir\030\002 \002(\t\022\024\n\014flush_o" +
      "utput\030\003 \003(\t\"S\n\013FlushAction\022\017\n\013START_FLUS" +
      "H\020\000\022\020\n\014COMMIT_FLUSH\020\001\022\017\n\013ABORT_FLUSH\020\002\022\020" +
      "\n\014CANNOT_FLUSH\020\003\"R\n\017StoreDescriptor\022\023\n\013f" +
      "amily_name\030\001 \002(\014\022\026\n\016store_home_dir\030\002 \002(\t",
      "\022\022\n\nstore_file\030\003 \003(\t\"\215\001\n\022BulkLoadDescrip" +
      "tor\022\036\n\ntable_name\030\001 \002(\0132\n.TableName\022\033\n\023e" +
      "ncoded_region_name\030\002 \002(\014\022 \n\006stores\030\003 \003(\013" +
      "2\020.StoreDescriptor\022\030\n\020bulkload_seq_num\030\004" +
      " \002(\003\"\237\002\n\025RegionEventDescriptor\0224\n\nevent_" +
      "type\030\001 \002(\0162 .RegionEventDescriptor.Event" +
      "Type\022\022\n\ntable_name\030\002 \002(\014\022\033\n\023encoded_regi" +
      "on_name\030\003 \002(\014\022\033\n\023log_sequence_number\030\004 \001" +
      "(\004\022 \n\006stores\030\005 \003(\0132\020.StoreDescriptor\022\033\n\006" +
      "server\030\006 \001(\0132\013.ServerName\022\023\n\013region_name",
      "\030\007 \001(\014\".\n\tEventType\022\017\n\013REGION_OPEN\020\000\022\020\n\014" +
      "REGION_CLOSE\020\001\"\014\n\nWALTrailer*F\n\tScopeTyp" +
      "e\022\033\n\027REPLICATION_SCOPE_LOCAL\020\000\022\034\n\030REPLIC" +
      "ATION_SCOPE_GLOBAL\020\001B?\n*org.apache.hadoo" +
      "p.hbase.protobuf.generatedB\tWALProtosH\001\210" +
      "\001\000\240\001\001"
    };
    // Callback invoked once the file descriptor is built: wires up the
    // per-message descriptor fields and their reflective accessor tables.
    // Message indexes (get(0)..get(8)) must match declaration order in
    // WAL.proto: WALHeader, WALKey, FamilyScope, CompactionDescriptor,
    // FlushDescriptor, StoreDescriptor, BulkLoadDescriptor,
    // RegionEventDescriptor, WALTrailer.
    com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
      new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
        public com.google.protobuf.ExtensionRegistry assignDescriptors(
            com.google.protobuf.Descriptors.FileDescriptor root) {
          descriptor = root;
          internal_static_WALHeader_descriptor =
            getDescriptor().getMessageTypes().get(0);
          internal_static_WALHeader_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_WALHeader_descriptor,
              new java.lang.String[] { "HasCompression", "EncryptionKey", "HasTagCompression", "WriterClsName", "CellCodecClsName", });
          internal_static_WALKey_descriptor =
            getDescriptor().getMessageTypes().get(1);
          internal_static_WALKey_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_WALKey_descriptor,
              new java.lang.String[] { "EncodedRegionName", "TableName", "LogSequenceNumber", "WriteTime", "ClusterId", "Scopes", "FollowingKvCount", "ClusterIds", "NonceGroup", "Nonce", "OrigSequenceNumber", });
          internal_static_FamilyScope_descriptor =
            getDescriptor().getMessageTypes().get(2);
          internal_static_FamilyScope_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_FamilyScope_descriptor,
              new java.lang.String[] { "Family", "ScopeType", });
          internal_static_CompactionDescriptor_descriptor =
            getDescriptor().getMessageTypes().get(3);
          internal_static_CompactionDescriptor_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_CompactionDescriptor_descriptor,
              new java.lang.String[] { "TableName", "EncodedRegionName", "FamilyName", "CompactionInput", "CompactionOutput", "StoreHomeDir", "RegionName", });
          internal_static_FlushDescriptor_descriptor =
            getDescriptor().getMessageTypes().get(4);
          internal_static_FlushDescriptor_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_FlushDescriptor_descriptor,
              new java.lang.String[] { "Action", "TableName", "EncodedRegionName", "FlushSequenceNumber", "StoreFlushes", "RegionName", });
          internal_static_FlushDescriptor_StoreFlushDescriptor_descriptor =
            internal_static_FlushDescriptor_descriptor.getNestedTypes().get(0);
          internal_static_FlushDescriptor_StoreFlushDescriptor_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_FlushDescriptor_StoreFlushDescriptor_descriptor,
              new java.lang.String[] { "FamilyName", "StoreHomeDir", "FlushOutput", });
          internal_static_StoreDescriptor_descriptor =
            getDescriptor().getMessageTypes().get(5);
          internal_static_StoreDescriptor_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_StoreDescriptor_descriptor,
              new java.lang.String[] { "FamilyName", "StoreHomeDir", "StoreFile", });
          internal_static_BulkLoadDescriptor_descriptor =
            getDescriptor().getMessageTypes().get(6);
          internal_static_BulkLoadDescriptor_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_BulkLoadDescriptor_descriptor,
              new java.lang.String[] { "TableName", "EncodedRegionName", "Stores", "BulkloadSeqNum", });
          internal_static_RegionEventDescriptor_descriptor =
            getDescriptor().getMessageTypes().get(7);
          internal_static_RegionEventDescriptor_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_RegionEventDescriptor_descriptor,
              new java.lang.String[] { "EventType", "TableName", "EncodedRegionName", "LogSequenceNumber", "Stores", "Server", "RegionName", });
          internal_static_WALTrailer_descriptor =
            getDescriptor().getMessageTypes().get(8);
          internal_static_WALTrailer_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_WALTrailer_descriptor,
              new java.lang.String[] { });
          return null;
        }
      };
    // Build the descriptor, resolving imports against the already-built
    // HBase.proto and Client.proto descriptors (order matches the .proto
    // import statements encoded in descriptorData above).
    com.google.protobuf.Descriptors.FileDescriptor
      .internalBuildGeneratedFileFrom(descriptorData,
        new com.google.protobuf.Descriptors.FileDescriptor[] {
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.getDescriptor(),
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.getDescriptor(),
        }, assigner);
  }
11972
11973
11974 }